lang (stringclasses 2 values) | license (stringclasses 13 values) | stderr (stringlengths 0-343) | commit (stringlengths 40-40) | returncode (int64 0-128) | repos (stringlengths 6-87.7k) | new_contents (stringlengths 0-6.23M) | new_file (stringlengths 3-311) | old_contents (stringlengths 0-6.23M) | message (stringlengths 6-9.1k) | old_file (stringlengths 3-311) | subject (stringlengths 0-4k) | git_diff (stringlengths 0-6.31M)
---|---|---|---|---|---|---|---|---|---|---|---|---|
Java | apache-2.0 | ece7a1a2beddcd5974e3de80e1753f0032b45a2f | 0 | twogee/ant-ivy,apache/ant-ivy,jaikiran/ant-ivy,apache/ant-ivy,sbt/ivy,twogee/ant-ivy,sbt/ivy,jaikiran/ant-ivy,jaikiran/ant-ivy,apache/ant-ivy,twogee/ant-ivy,jaikiran/ant-ivy,twogee/ant-ivy,apache/ant-ivy | /*
* This file is subject to the license found in LICENCE.TXT in the root directory of the project.
*
* version 1.3.1
*/
package fr.jayasoft.ivy.url;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.net.UnknownHostException;
import fr.jayasoft.ivy.util.CopyProgressListener;
import fr.jayasoft.ivy.util.FileUtil;
import fr.jayasoft.ivy.util.Message;
/**
* @author Xavier Hanin
* @author Christian Riege
*
*/
public class BasicURLHandler extends AbstractURLHandler {
private static interface HttpStatus {
static final int SC_OK = 200;
static final int SC_PROXY_AUTHENTICATION_REQUIRED = 407;
}
public BasicURLHandler() {
Message.debug("installing "+IvyAuthenticator.INSTANCE.getClass()); // do not remove, ensure IvyAuthenticator class loading!
}
public URLInfo getURLInfo(URL url) {
return getURLInfo(url, 0);
}
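    /**
     * Probes the given URL and reports whether it is reachable, together with
     * its content length and last-modified timestamp. Returns UNAVAILABLE when
     * the resource cannot be reached or reports no content.
     */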
public URLInfo getURLInfo(URL url, int timeout) {
URLConnection con = null;
try {
con = url.openConnection();
if (con instanceof HttpURLConnection) {
int status = ((HttpURLConnection)con).getResponseCode();
if (status == HttpStatus.SC_OK) {
return new URLInfo(
true,
((HttpURLConnection)con).getContentLength(),
con.getLastModified()
);
}
if (status == HttpStatus.SC_PROXY_AUTHENTICATION_REQUIRED) {
Message.warn("Your proxy requires authentication.");
            } else if (String.valueOf(status).startsWith("4")) {
                Message.verbose("CLIENT ERROR: "+((HttpURLConnection)con).getResponseMessage()+" url="+url);
            } else if (String.valueOf(status).startsWith("5")) {
Message.error("SERVER ERROR: "+((HttpURLConnection)con).getResponseMessage()+" url="+url);
}
Message.debug("HTTP response status: "+status+" url="+url);
} else {
int contentLength = con.getContentLength();
if (contentLength <= 0) {
return UNAVAILABLE;
} else {
return new URLInfo(
true,
contentLength,
con.getLastModified()
);
}
}
} catch (UnknownHostException e) {
Message.warn("Host " + e.getMessage() +" not found. url="+url);
Message.info("You probably access the destination server through a proxy server that is not well configured.");
} catch (IOException e) {
Message.error("Server access Error: "+e.getMessage()+" url="+url);
} finally {
disconnect(con);
}
return UNAVAILABLE;
}
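    /**
     * Reads the full content of the URL into memory and returns it as a
     * ByteArrayInputStream, so the underlying connection can be closed before
     * the caller consumes the data.
     */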
public InputStream openStream(URL url) throws IOException {
URLConnection conn = null;
InputStream inStream = null;
try {
conn = url.openConnection();
inStream = conn.getInputStream();
ByteArrayOutputStream outStream = new ByteArrayOutputStream();
byte[] buffer = new byte[4096];
int len;
while ((len = inStream.read(buffer)) > 0) {
outStream.write(buffer, 0, len);
}
return new ByteArrayInputStream(outStream.toByteArray());
}
finally {
if (inStream != null) {
inStream.close();
}
disconnect(conn);
}
}
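    /**
     * Copies the resource at src into dest and verifies the resulting file
     * size against the reported Content-Length, deleting the partial file and
     * failing if they differ.
     */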
public void download(URL src, File dest, CopyProgressListener l) throws IOException {
URLConnection srcConn = null;
try {
srcConn = src.openConnection();
int contentLength = srcConn.getContentLength();
FileUtil.copy(srcConn.getInputStream(), dest, l);
if (dest.length() != contentLength && contentLength != -1) {
dest.delete();
throw new IOException("Downloaded file size doesn't match expected Content Length for "+src+". Please retry.");
}
}
finally {
disconnect(srcConn);
}
}
private void disconnect(URLConnection con) {
if (con instanceof HttpURLConnection) {
((HttpURLConnection)con).disconnect();
} else if (con != null && "sun.net.www.protocol.file.FileURLConnection".equals(con.getClass().getName())) {
// ugly fix for a sun jre bug:
// http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4257700
//
// getting header info on the fileurlconnection opens the connection,
// which opens a file input stream without closing it.
try {con.getInputStream().close();} catch (IOException e) {}
}
}
}
| src/java/fr/jayasoft/ivy/url/BasicURLHandler.java | /*
* This file is subject to the license found in LICENCE.TXT in the root directory of the project.
*
* version 1.3.1
*/
package fr.jayasoft.ivy.url;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.net.UnknownHostException;
import fr.jayasoft.ivy.util.CopyProgressListener;
import fr.jayasoft.ivy.util.FileUtil;
import fr.jayasoft.ivy.util.Message;
/**
* @author Xavier Hanin
* @author Christian Riege
*
*/
public class BasicURLHandler extends AbstractURLHandler {
private static interface HttpStatus {
static final int SC_OK = 200;
static final int SC_PROXY_AUTHENTICATION_REQUIRED = 407;
}
public BasicURLHandler() {
Message.debug("installing "+IvyAuthenticator.INSTANCE.getClass()); // do not remove, ensure IvyAuthenticator class loading!
}
public URLInfo getURLInfo(URL url) {
return getURLInfo(url, 0);
}
public URLInfo getURLInfo(URL url, int timeout) {
URLConnection con = null;
try {
con = url.openConnection();
if (con instanceof HttpURLConnection) {
int status = ((HttpURLConnection)con).getResponseCode();
if (status == HttpStatus.SC_OK) {
return new URLInfo(
true,
((HttpURLConnection)con).getContentLength(),
con.getLastModified()
);
}
if (status == HttpStatus.SC_PROXY_AUTHENTICATION_REQUIRED) {
Message.warn("Your proxy requires authentication.");
            } else if (String.valueOf(status).startsWith("4")) {
                Message.verbose("CLIENT ERROR: "+((HttpURLConnection)con).getResponseMessage()+" url="+url);
            } else if (String.valueOf(status).startsWith("5")) {
Message.error("SERVER ERROR: "+((HttpURLConnection)con).getResponseMessage()+" url="+url);
}
Message.debug("HTTP response status: "+status+" url="+url);
} else {
int contentLength = con.getContentLength();
if (contentLength <= 0) {
return UNAVAILABLE;
} else {
return new URLInfo(
true,
contentLength,
con.getLastModified()
);
}
}
} catch (UnknownHostException e) {
Message.warn("Host " + e.getMessage() +" not found. url="+url);
Message.info("You probably access the destination server through a proxy server that is not well configured.");
} catch (IOException e) {
Message.error("Server access Error: "+e.getMessage()+" url="+url);
} finally {
if (con instanceof HttpURLConnection) {
((HttpURLConnection)con).disconnect();
}
}
return UNAVAILABLE;
}
public InputStream openStream(URL url) throws IOException {
URLConnection conn = null;
InputStream inStream = null;
try {
conn = url.openConnection();
inStream = conn.getInputStream();
ByteArrayOutputStream outStream = new ByteArrayOutputStream();
byte[] buffer = new byte[4096];
int len;
while ((len = inStream.read(buffer)) > 0) {
outStream.write(buffer, 0, len);
}
return new ByteArrayInputStream(outStream.toByteArray());
}
finally {
if (inStream != null) {
inStream.close();
}
if (conn != null) {
if (conn instanceof HttpURLConnection) {
//System.out.println("Closing HttpURLConnection");
((HttpURLConnection) conn).disconnect();
}
}
}
}
public void download(URL src, File dest, CopyProgressListener l) throws IOException {
URLConnection srcConn = null;
try {
srcConn = src.openConnection();
int contentLength = srcConn.getContentLength();
FileUtil.copy(srcConn.getInputStream(), dest, l);
if (dest.length() != contentLength && contentLength != -1) {
dest.delete();
throw new IOException("Downloaded file size doesn't match expected Content Length for "+src+". Please retry.");
}
}
finally {
if (srcConn != null) {
if (srcConn instanceof HttpURLConnection) {
//System.out.println("Closing HttpURLConnection");
((HttpURLConnection) srcConn).disconnect();
}
}
}
}
}
| review disconnection: workaround for sun jre bug in file url connection
git-svn-id: 29f0868cb983d9404d09bb30d964f948aae571fc@488010 13f79535-47bb-0310-9956-ffa450edef68
| src/java/fr/jayasoft/ivy/url/BasicURLHandler.java | review disconnection: workaround for sun jre bug in file url connection | <ide><path>src/java/fr/jayasoft/ivy/url/BasicURLHandler.java
<ide> } catch (IOException e) {
<ide> Message.error("Server access Error: "+e.getMessage()+" url="+url);
<ide> } finally {
<del> if (con instanceof HttpURLConnection) {
<del> ((HttpURLConnection)con).disconnect();
<del> }
<add> disconnect(con);
<ide> }
<ide> return UNAVAILABLE;
<ide> }
<del>
<add>
<ide> public InputStream openStream(URL url) throws IOException {
<ide> URLConnection conn = null;
<ide> InputStream inStream = null;
<ide> inStream.close();
<ide> }
<ide>
<del> if (conn != null) {
<del> if (conn instanceof HttpURLConnection) {
<del> //System.out.println("Closing HttpURLConnection");
<del> ((HttpURLConnection) conn).disconnect();
<del> }
<del> }
<add> disconnect(conn);
<ide> }
<ide> }
<ide> public void download(URL src, File dest, CopyProgressListener l) throws IOException {
<ide> }
<ide> }
<ide> finally {
<del> if (srcConn != null) {
<del> if (srcConn instanceof HttpURLConnection) {
<del> //System.out.println("Closing HttpURLConnection");
<del> ((HttpURLConnection) srcConn).disconnect();
<del> }
<del> }
<add> disconnect(srcConn);
<ide> }
<ide> }
<add>
<add> private void disconnect(URLConnection con) {
<add> if (con instanceof HttpURLConnection) {
<add> ((HttpURLConnection)con).disconnect();
<add> } else if (con != null && "sun.net.www.protocol.file.FileURLConnection".equals(con.getClass().getName())) {
<add> // ugly fix for a sun jre bug:
<add> // http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4257700
<add> //
<add> // getting header info on the fileurlconnection opens the connection,
<add> // which opens a file input stream without closing it.
<add> try {con.getInputStream().close();} catch (IOException e) {}
<add> }
<add> }
<add>
<ide> } |
|
Java | agpl-3.0 | e3e4cf988445bb1732e9fa999750a59d4952da7f | 0 | jhonMalcom79/mcMMO_pers,isokissa3/mcMMO,virustotalop/mcMMO,EvilOlaf/mcMMO,Maximvdw/mcMMO | package com.gmail.nossr50.config.treasure;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.bukkit.Material;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.inventory.ItemStack;
import org.bukkit.material.MaterialData;
import com.gmail.nossr50.config.ConfigLoader;
import com.gmail.nossr50.datatypes.treasure.ExcavationTreasure;
import com.gmail.nossr50.datatypes.treasure.FishingTreasure;
import com.gmail.nossr50.datatypes.treasure.HylianTreasure;
import com.gmail.nossr50.datatypes.treasure.Treasure;
public class TreasureConfig extends ConfigLoader {
private static TreasureConfig instance;
public List<ExcavationTreasure> excavationFromDirt = new ArrayList<ExcavationTreasure>();
public List<ExcavationTreasure> excavationFromGrass = new ArrayList<ExcavationTreasure>();
public List<ExcavationTreasure> excavationFromSand = new ArrayList<ExcavationTreasure>();
public List<ExcavationTreasure> excavationFromGravel = new ArrayList<ExcavationTreasure>();
public List<ExcavationTreasure> excavationFromClay = new ArrayList<ExcavationTreasure>();
public List<ExcavationTreasure> excavationFromMycel = new ArrayList<ExcavationTreasure>();
public List<ExcavationTreasure> excavationFromSoulSand = new ArrayList<ExcavationTreasure>();
public List<HylianTreasure> hylianFromBushes = new ArrayList<HylianTreasure>();
public List<HylianTreasure> hylianFromFlowers = new ArrayList<HylianTreasure>();
public List<HylianTreasure> hylianFromPots = new ArrayList<HylianTreasure>();
public List<FishingTreasure> fishingRewards = new ArrayList<FishingTreasure>();
private TreasureConfig() {
super("treasures.yml");
loadKeys();
}
public static TreasureConfig getInstance() {
if (instance == null) {
instance = new TreasureConfig();
}
return instance;
}
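    /**
     * Parses and validates every treasure entry in treasures.yml, then sorts
     * the valid entries into the fishing, Hylian, and excavation reward lists.
     */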
@Override
protected void loadKeys() {
Map<String, Treasure> treasures = new HashMap<String, Treasure>();
ConfigurationSection treasureSection = config.getConfigurationSection("Treasures");
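        // An empty treasures.yml has no "Treasures" section; skip loading
        // entirely instead of failing on the missing section below.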
if (treasureSection == null) {
return;
}
Set<String> treasureConfigSet = treasureSection.getKeys(false);
for (String treasureName : treasureConfigSet) {
// Validate all the things!
List<String> reason = new ArrayList<String>();
/*
* ID, Amount, and Data
*/
if (!config.contains("Treasures." + treasureName + ".ID")) {
reason.add("Missing ID");
}
if (!config.contains("Treasures." + treasureName + ".Amount")) {
reason.add("Missing Amount");
}
if (!config.contains("Treasures." + treasureName + ".Data")) {
reason.add("Missing Data");
}
int id = config.getInt("Treasures." + treasureName + ".ID");
int amount = config.getInt("Treasures." + treasureName + ".Amount");
int data = config.getInt("Treasures." + treasureName + ".Data");
if (Material.getMaterial(id) == null) {
reason.add("Invalid id: " + id);
}
if (amount < 1) {
reason.add("Invalid amount: " + amount);
}
if (data > 127 || data < -128) {
reason.add("Invalid data: " + data);
}
/*
* XP, Drop Chance, and Drop Level
*/
if (!config.contains("Treasures." + treasureName + ".XP")) {
reason.add("Missing XP");
}
if (!config.contains("Treasures." + treasureName + ".Drop_Chance")) {
reason.add("Missing Drop_Chance");
}
if (!config.contains("Treasures." + treasureName + ".Drop_Level")) {
reason.add("Missing Drop_Level");
}
int xp = config.getInt("Treasures." + treasureName + ".XP");
Double dropChance = config.getDouble("Treasures." + treasureName + ".Drop_Chance");
int dropLevel = config.getInt("Treasures." + treasureName + ".Drop_Level");
if (xp < 0) {
reason.add("Invalid xp: " + xp);
}
if (dropChance < 0) {
reason.add("Invalid Drop_Chance: " + dropChance);
}
if (dropLevel < 0) {
reason.add("Invalid Drop_Level: " + dropLevel);
}
/*
* Drops From & Max Level
*/
ItemStack item = (new MaterialData(id, (byte) data)).toItemStack(amount);
if (config.getBoolean("Treasures." + treasureName + ".Drops_From.Fishing", false)) {
if (config.getConfigurationSection("Treasures." + treasureName + ".Drops_From").getKeys(false).size() != 1) {
reason.add("Fishing drops cannot also be excavation drops");
}
if (!config.contains("Treasures." + treasureName + ".Max_Level")) {
reason.add("Missing Max_Level");
}
int maxLevel = config.getInt("Treasures." + treasureName + ".Max_Level");
if (noErrorsInTreasure(reason)) {
FishingTreasure fTreasure = new FishingTreasure(item, xp, dropChance, dropLevel, maxLevel);
treasures.put(treasureName, fTreasure);
}
}
else {
ExcavationTreasure eTreasure = new ExcavationTreasure(item, xp, dropChance, dropLevel);
HylianTreasure hTreasure = new HylianTreasure(item, xp, dropChance, dropLevel);
if (config.getBoolean("Treasures." + treasureName + ".Drops_From.Dirt", false)) {
eTreasure.setDropsFromDirt();
}
if (config.getBoolean("Treasures." + treasureName + ".Drops_From.Grass", false)) {
eTreasure.setDropsFromGrass();
}
if (config.getBoolean("Treasures." + treasureName + ".Drops_From.Sand", false)) {
eTreasure.setDropsFromSand();
}
if (config.getBoolean("Treasures." + treasureName + ".Drops_From.Gravel", false)) {
eTreasure.setDropsFromGravel();
}
if (config.getBoolean("Treasures." + treasureName + ".Drops_From.Clay", false)) {
eTreasure.setDropsFromClay();
}
if (config.getBoolean("Treasures." + treasureName + ".Drops_From.Mycelium", false)) {
eTreasure.setDropsFromMycel();
}
if (config.getBoolean("Treasures." + treasureName + ".Drops_From.Soul_Sand", false)) {
eTreasure.setDropsFromSoulSand();
}
if (config.getBoolean("Treasures." + treasureName + ".Drops_From.Bushes", false)) {
hTreasure.setDropsFromBushes();
}
if (config.getBoolean("Treasures." + treasureName + ".Drops_From.Flowers", false)) {
hTreasure.setDropsFromFlowers();
}
if (config.getBoolean("Treasures." + treasureName + ".Drops_From.Pots", false)) {
hTreasure.setDropsFromPots();
}
if (config.getBoolean("Treasures." + treasureName + ".Drops_From.Fishing", false)) {
reason.add("Excavation drops cannot also be fishing drops");
}
if (noErrorsInTreasure(reason) && hTreasure.getDropsFrom() == (byte) 0x0) {
treasures.put(treasureName, eTreasure);
}
else if (noErrorsInTreasure(reason) && eTreasure.getDropsFrom() == (byte) 0x0) {
treasures.put(treasureName, hTreasure);
}
}
}
List<String> excavationTreasures = config.getStringList("Excavation.Treasure");
List<String> fishingTreasures = config.getStringList("Fishing.Treasure");
List<String> hylianTreasures = config.getStringList("Hylian_Luck.Treasure");
for (Entry<String, Treasure> nextEntry : treasures.entrySet()) {
String treasureKey = nextEntry.getKey();
Treasure treasure = nextEntry.getValue();
if (treasure instanceof FishingTreasure) {
if (fishingTreasures == null || !fishingTreasures.contains(treasureKey)) {
continue;
}
fishingRewards.add((FishingTreasure) treasure);
}
else if (treasure instanceof HylianTreasure) {
if (hylianTreasures == null || !hylianTreasures.contains(treasureKey)) {
continue;
}
HylianTreasure hTreasure = (HylianTreasure) treasure;
if (hTreasure.getDropsFromBushes()) {
hylianFromBushes.add(hTreasure);
}
if (hTreasure.getDropsFromFlowers()) {
hylianFromFlowers.add(hTreasure);
}
if (hTreasure.getDropsFromPots()) {
hylianFromPots.add(hTreasure);
}
}
else if (treasure instanceof ExcavationTreasure) {
if (excavationTreasures == null || !excavationTreasures.contains(treasureKey)) {
continue;
}
ExcavationTreasure eTreasure = (ExcavationTreasure) treasure;
if (eTreasure.getDropsFromDirt()) {
excavationFromDirt.add(eTreasure);
}
if (eTreasure.getDropsFromGrass()) {
excavationFromGrass.add(eTreasure);
}
if (eTreasure.getDropsFromSand()) {
excavationFromSand.add(eTreasure);
}
if (eTreasure.getDropsFromGravel()) {
excavationFromGravel.add(eTreasure);
}
if (eTreasure.getDropsFromClay()) {
excavationFromClay.add(eTreasure);
}
if (eTreasure.getDropsFromMycel()) {
excavationFromMycel.add(eTreasure);
}
if (eTreasure.getDropsFromSoulSand()) {
excavationFromSoulSand.add(eTreasure);
}
}
}
}
private boolean noErrorsInTreasure(List<String> issues) {
if (issues.isEmpty()) {
return true;
}
for (String issue : issues) {
plugin.getLogger().warning(issue);
}
return false;
}
}
| src/main/java/com/gmail/nossr50/config/treasure/TreasureConfig.java | package com.gmail.nossr50.config.treasure;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.bukkit.Material;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.inventory.ItemStack;
import org.bukkit.material.MaterialData;
import com.gmail.nossr50.config.ConfigLoader;
import com.gmail.nossr50.datatypes.treasure.ExcavationTreasure;
import com.gmail.nossr50.datatypes.treasure.FishingTreasure;
import com.gmail.nossr50.datatypes.treasure.HylianTreasure;
import com.gmail.nossr50.datatypes.treasure.Treasure;
public class TreasureConfig extends ConfigLoader {
private static TreasureConfig instance;
public List<ExcavationTreasure> excavationFromDirt = new ArrayList<ExcavationTreasure>();
public List<ExcavationTreasure> excavationFromGrass = new ArrayList<ExcavationTreasure>();
public List<ExcavationTreasure> excavationFromSand = new ArrayList<ExcavationTreasure>();
public List<ExcavationTreasure> excavationFromGravel = new ArrayList<ExcavationTreasure>();
public List<ExcavationTreasure> excavationFromClay = new ArrayList<ExcavationTreasure>();
public List<ExcavationTreasure> excavationFromMycel = new ArrayList<ExcavationTreasure>();
public List<ExcavationTreasure> excavationFromSoulSand = new ArrayList<ExcavationTreasure>();
public List<HylianTreasure> hylianFromBushes = new ArrayList<HylianTreasure>();
public List<HylianTreasure> hylianFromFlowers = new ArrayList<HylianTreasure>();
public List<HylianTreasure> hylianFromPots = new ArrayList<HylianTreasure>();
public List<FishingTreasure> fishingRewards = new ArrayList<FishingTreasure>();
private TreasureConfig() {
super("treasures.yml");
loadKeys();
}
public static TreasureConfig getInstance() {
if (instance == null) {
instance = new TreasureConfig();
}
return instance;
}
@Override
protected void loadKeys() {
Map<String, Treasure> treasures = new HashMap<String, Treasure>();
ConfigurationSection treasureSection = config.getConfigurationSection("Treasures");
Set<String> treasureConfigSet = treasureSection.getKeys(false);
for (String treasureName : treasureConfigSet) {
// Validate all the things!
List<String> reason = new ArrayList<String>();
/*
* ID, Amount, and Data
*/
if (!config.contains("Treasures." + treasureName + ".ID")) {
reason.add("Missing ID");
}
if (!config.contains("Treasures." + treasureName + ".Amount")) {
reason.add("Missing Amount");
}
if (!config.contains("Treasures." + treasureName + ".Data")) {
reason.add("Missing Data");
}
int id = config.getInt("Treasures." + treasureName + ".ID");
int amount = config.getInt("Treasures." + treasureName + ".Amount");
int data = config.getInt("Treasures." + treasureName + ".Data");
if (Material.getMaterial(id) == null) {
reason.add("Invalid id: " + id);
}
if (amount < 1) {
reason.add("Invalid amount: " + amount);
}
if (data > 127 || data < -128) {
reason.add("Invalid data: " + data);
}
/*
* XP, Drop Chance, and Drop Level
*/
if (!config.contains("Treasures." + treasureName + ".XP")) {
reason.add("Missing XP");
}
if (!config.contains("Treasures." + treasureName + ".Drop_Chance")) {
reason.add("Missing Drop_Chance");
}
if (!config.contains("Treasures." + treasureName + ".Drop_Level")) {
reason.add("Missing Drop_Level");
}
int xp = config.getInt("Treasures." + treasureName + ".XP");
Double dropChance = config.getDouble("Treasures." + treasureName + ".Drop_Chance");
int dropLevel = config.getInt("Treasures." + treasureName + ".Drop_Level");
if (xp < 0) {
reason.add("Invalid xp: " + xp);
}
if (dropChance < 0) {
reason.add("Invalid Drop_Chance: " + dropChance);
}
if (dropLevel < 0) {
reason.add("Invalid Drop_Level: " + dropLevel);
}
/*
* Drops From & Max Level
*/
ItemStack item = (new MaterialData(id, (byte) data)).toItemStack(amount);
if (config.getBoolean("Treasures." + treasureName + ".Drops_From.Fishing", false)) {
if (config.getConfigurationSection("Treasures." + treasureName + ".Drops_From").getKeys(false).size() != 1) {
reason.add("Fishing drops cannot also be excavation drops");
}
if (!config.contains("Treasures." + treasureName + ".Max_Level")) {
reason.add("Missing Max_Level");
}
int maxLevel = config.getInt("Treasures." + treasureName + ".Max_Level");
if (noErrorsInTreasure(reason)) {
FishingTreasure fTreasure = new FishingTreasure(item, xp, dropChance, dropLevel, maxLevel);
treasures.put(treasureName, fTreasure);
}
}
else {
ExcavationTreasure eTreasure = new ExcavationTreasure(item, xp, dropChance, dropLevel);
HylianTreasure hTreasure = new HylianTreasure(item, xp, dropChance, dropLevel);
if (config.getBoolean("Treasures." + treasureName + ".Drops_From.Dirt", false)) {
eTreasure.setDropsFromDirt();
}
if (config.getBoolean("Treasures." + treasureName + ".Drops_From.Grass", false)) {
eTreasure.setDropsFromGrass();
}
if (config.getBoolean("Treasures." + treasureName + ".Drops_From.Sand", false)) {
eTreasure.setDropsFromSand();
}
if (config.getBoolean("Treasures." + treasureName + ".Drops_From.Gravel", false)) {
eTreasure.setDropsFromGravel();
}
if (config.getBoolean("Treasures." + treasureName + ".Drops_From.Clay", false)) {
eTreasure.setDropsFromClay();
}
if (config.getBoolean("Treasures." + treasureName + ".Drops_From.Mycelium", false)) {
eTreasure.setDropsFromMycel();
}
if (config.getBoolean("Treasures." + treasureName + ".Drops_From.Soul_Sand", false)) {
eTreasure.setDropsFromSoulSand();
}
if (config.getBoolean("Treasures." + treasureName + ".Drops_From.Bushes", false)) {
hTreasure.setDropsFromBushes();
}
if (config.getBoolean("Treasures." + treasureName + ".Drops_From.Flowers", false)) {
hTreasure.setDropsFromFlowers();
}
if (config.getBoolean("Treasures." + treasureName + ".Drops_From.Pots", false)) {
hTreasure.setDropsFromPots();
}
if (config.getBoolean("Treasures." + treasureName + ".Drops_From.Fishing", false)) {
reason.add("Excavation drops cannot also be fishing drops");
}
if (noErrorsInTreasure(reason) && hTreasure.getDropsFrom() == (byte) 0x0) {
treasures.put(treasureName, eTreasure);
}
else if (noErrorsInTreasure(reason) && eTreasure.getDropsFrom() == (byte) 0x0) {
treasures.put(treasureName, hTreasure);
}
}
}
List<String> excavationTreasures = config.getStringList("Excavation.Treasure");
List<String> fishingTreasures = config.getStringList("Fishing.Treasure");
List<String> hylianTreasures = config.getStringList("Hylian_Luck.Treasure");
for (Entry<String, Treasure> nextEntry : treasures.entrySet()) {
String treasureKey = nextEntry.getKey();
Treasure treasure = nextEntry.getValue();
if (treasure instanceof FishingTreasure) {
if (!fishingTreasures.contains(treasureKey)) {
continue;
}
fishingRewards.add((FishingTreasure) treasure);
}
else if (treasure instanceof HylianTreasure) {
if (!hylianTreasures.contains(treasureKey)) {
continue;
}
HylianTreasure hTreasure = (HylianTreasure) treasure;
if (hTreasure.getDropsFromBushes()) {
hylianFromBushes.add(hTreasure);
}
if (hTreasure.getDropsFromFlowers()) {
hylianFromFlowers.add(hTreasure);
}
if (hTreasure.getDropsFromPots()) {
hylianFromPots.add(hTreasure);
}
}
else if (treasure instanceof ExcavationTreasure) {
if (!excavationTreasures.contains(treasureKey)) {
continue;
}
ExcavationTreasure eTreasure = (ExcavationTreasure) treasure;
if (eTreasure.getDropsFromDirt()) {
excavationFromDirt.add(eTreasure);
}
if (eTreasure.getDropsFromGrass()) {
excavationFromGrass.add(eTreasure);
}
if (eTreasure.getDropsFromSand()) {
excavationFromSand.add(eTreasure);
}
if (eTreasure.getDropsFromGravel()) {
excavationFromGravel.add(eTreasure);
}
if (eTreasure.getDropsFromClay()) {
excavationFromClay.add(eTreasure);
}
if (eTreasure.getDropsFromMycel()) {
excavationFromMycel.add(eTreasure);
}
if (eTreasure.getDropsFromSoulSand()) {
excavationFromSoulSand.add(eTreasure);
}
}
}
}
private boolean noErrorsInTreasure(List<String> issues) {
if (issues.isEmpty()) {
return true;
}
for (String issue : issues) {
plugin.getLogger().warning(issue);
}
return false;
}
}
| Do not attempt to load an empty treasure config
| src/main/java/com/gmail/nossr50/config/treasure/TreasureConfig.java | Do not attempt to load an empty treasure config | <ide><path>src/main/java/com/gmail/nossr50/config/treasure/TreasureConfig.java
<ide> protected void loadKeys() {
<ide> Map<String, Treasure> treasures = new HashMap<String, Treasure>();
<ide> ConfigurationSection treasureSection = config.getConfigurationSection("Treasures");
<add>
<add> if (treasureSection == null) {
<add> return;
<add> }
<add>
<ide> Set<String> treasureConfigSet = treasureSection.getKeys(false);
<ide>
<ide> for (String treasureName : treasureConfigSet) {
<ide> Treasure treasure = nextEntry.getValue();
<ide>
<ide> if (treasure instanceof FishingTreasure) {
<del> if (!fishingTreasures.contains(treasureKey)) {
<add> if (fishingTreasures == null || !fishingTreasures.contains(treasureKey)) {
<ide> continue;
<ide> }
<ide>
<ide> fishingRewards.add((FishingTreasure) treasure);
<ide> }
<ide> else if (treasure instanceof HylianTreasure) {
<del> if (!hylianTreasures.contains(treasureKey)) {
<add> if (hylianTreasures == null || !hylianTreasures.contains(treasureKey)) {
<ide> continue;
<ide> }
<ide>
<ide> }
<ide> }
<ide> else if (treasure instanceof ExcavationTreasure) {
<del> if (!excavationTreasures.contains(treasureKey)) {
<add> if (excavationTreasures == null || !excavationTreasures.contains(treasureKey)) {
<ide> continue;
<ide> }
<ide> |
|
Java | bsd-3-clause | 2db77ca78532eec7675e68945758fa72057b7e71 | 0 | Civcraft/BetterShards | package vg.civcraft.mc.bettershards;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.bukkit.Bukkit;
import org.bukkit.Location;
import org.bukkit.World;
import org.bukkit.entity.Player;
import vg.civcraft.mc.bettershards.database.DatabaseManager;
import vg.civcraft.mc.bettershards.external.MercuryManager;
import vg.civcraft.mc.bettershards.portal.Portal;
public class PortalsManager {
private DatabaseManager db = BetterShardsPlugin.getInstance().getDatabaseManager();
private Map<String, Portal> portals;
private List<Player> arrivedPlayers = new ArrayList<Player>();
private MercuryManager mercManager;
public PortalsManager() {
super();
portals = new HashMap<String, Portal>();
mercManager = BetterShardsPlugin.getMercuryManager();
registerParticleRunnable();
}
public void loadPortalsManager() {
loadPortalsFromServer();
removeTeleportedPlayers();
autoSaveTimer();
}
public void createPortal(Portal portal){
portals.put(portal.getName(), portal);
db.addPortal(portal);
db.addPortalData(portal, null); // At this point it won't have a connection
}
public void deletePortal(Portal portal) {
deletePortalLocally(portal);
db.removePortalData(portal);
db.removePortalLoc(portal);
mercManager.sendPortalDelete(portal.getName());
}
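	/**
	 * Removes the portal from the local cache and clears any partner links
	 * still pointing at it; database and remote state are handled separately
	 * by deletePortal().
	 */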
public void deletePortalLocally(Portal portal) {
List <Portal> toRemove = new LinkedList<Portal>();
for(Entry <String, Portal> entry : portals.entrySet()) {
if (entry.getValue().getPartnerPortal() == portal) {
toRemove.add(entry.getValue());
}
}
for(Portal p : toRemove) {
p.setPartnerPortal(null);
}
portals.remove(portal.getName());
}
/*
* It is possible there may be one or more portals in an area but that would
* be stupid of admin to do as each player can only go to one server.
* So instead if it just finds one valid location what ever it may be
* that is the portal you will get.
*/
public Portal getPortal(Location loc){
for(Portal p : portals.values()) {
if (p.isOnCurrentServer() && p.inPortal(loc)) {
return p;
}
}
return null; // Like the evil that is nothingness.
}
public Portal getPortal(String name){
Portal p = portals.get(name);
if (p == null)
p = db.getPortal(name);
if (p == null)
return null;
portals.put(name, p);
return p;
}
/**
* Only loads portals from this server
*/
public void loadPortalsFromServer(){
List<World> worlds = Bukkit.getWorlds();
List<Portal> portals = db.getAllPortalsByWorld(worlds.toArray(new World[worlds.size()]));
for (Portal p: portals) {
this.portals.put(p.getName(), p);
}
}
private void removeTeleportedPlayers(){
Bukkit.getScheduler().scheduleSyncRepeatingTask(BetterShardsPlugin.getInstance(),
new Runnable() {
@Override
public void run() {
List<Player> toRemove = new ArrayList<Player>();
for (Player p: arrivedPlayers){
if (!p.isOnline()) {
toRemove.add(p);
continue;
}
Location loc = p.getLocation();
if (getPortal(loc) != null)
continue;
toRemove.add(p);
}
arrivedPlayers.removeAll(toRemove);
}
}, 100, 20);
}
public boolean canTransferPlayer(Player p) {
if (BetterShardsPlugin.getCombatTagManager().isInCombatTag(p)){
return false;
} else{
return !arrivedPlayers.contains(p);
}
}
public void addArrivedPlayer(Player p) {
arrivedPlayers.add(p);
}
// We want it sync incase a mercury message comes through we don't want it to override the db before
// mercury gets a chance to update the portal.
private void autoSaveTimer() {
Bukkit.getScheduler().scheduleSyncRepeatingTask(BetterShardsPlugin.getInstance(), new Runnable() {
@Override
public void run() {
Collection<Portal> ports = portals.values();
for (Portal p: ports) {
if (p.isDirty()) {
db.updatePortalData(p);
p.setDirty(false);
}
}
}
}, 500, 1000);
}
private void registerParticleRunnable() {
Bukkit.getScheduler().scheduleSyncRepeatingTask(BetterShardsPlugin.getInstance(), new Runnable() {
@Override
public void run() {
for(Portal portal : portals.values()) {
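					// Particles are only rendered for portals hosted on this shard.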
if (!portal.isOnCurrentServer()) {
continue;
}
for(Player p : Bukkit.getOnlinePlayers()) {
portal.showParticles(p);
}
}
}
}, 20L, 20L);
}
}
| BetterShardsBukkit/src/main/java/vg/civcraft/mc/bettershards/PortalsManager.java | package vg.civcraft.mc.bettershards;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.bukkit.Bukkit;
import org.bukkit.Location;
import org.bukkit.World;
import org.bukkit.entity.Player;
import vg.civcraft.mc.bettershards.database.DatabaseManager;
import vg.civcraft.mc.bettershards.external.MercuryManager;
import vg.civcraft.mc.bettershards.portal.Portal;
public class PortalsManager {
private DatabaseManager db = BetterShardsPlugin.getInstance().getDatabaseManager();
private Map<String, Portal> portals;
private List<Player> arrivedPlayers = new ArrayList<Player>();
private MercuryManager mercManager;
public PortalsManager() {
super();
portals = new HashMap<String, Portal>();
mercManager = BetterShardsPlugin.getMercuryManager();
registerParticleRunnable();
}
public void loadPortalsManager() {
loadPortalsFromServer();
removeTeleportedPlayers();
autoSaveTimer();
}
public void createPortal(Portal portal){
portals.put(portal.getName(), portal);
db.addPortal(portal);
db.addPortalData(portal, null); // At this point it won't have a connection
}
public void deletePortal(Portal portal) {
deletePortalLocally(portal);
db.removePortalData(portal);
db.removePortalLoc(portal);
mercManager.sendPortalDelete(portal.getName());
}
public void deletePortalLocally(Portal portal) {
List <Portal> toRemove = new LinkedList<Portal>();
for(Entry <String, Portal> entry : portals.entrySet()) {
if (entry.getValue().getPartnerPortal() == portal) {
toRemove.add(entry.getValue());
}
}
for(Portal p : toRemove) {
p.setPartnerPortal(null);
}
portals.remove(portal.getName());
}
/*
* It is possible there may be one or more portals in an area but that would
* be stupid of admin to do as each player can only go to one server.
* So instead if it just finds one valid location what ever it may be
* that is the portal you will get.
*/
public Portal getPortal(Location loc){
for(Portal p : portals.values()) {
if (p.isOnCurrentServer() && p.inPortal(loc)) {
return p;
}
}
return null; // Like the evil that is nothingness.
}
public Portal getPortal(String name){
Portal p = portals.get(name);
if (p == null)
p = db.getPortal(name);
if (p == null)
return null;
portals.put(name, p);
return p;
}
/**
* Only loads portals from this server
*/
public void loadPortalsFromServer(){
List<World> worlds = Bukkit.getWorlds();
List<Portal> portals = db.getAllPortalsByWorld(worlds.toArray(new World[worlds.size()]));
for (Portal p: portals) {
this.portals.put(p.getName(), p);
}
}
private void removeTeleportedPlayers(){
Bukkit.getScheduler().scheduleSyncRepeatingTask(BetterShardsPlugin.getInstance(),
new Runnable() {
@Override
public void run() {
List<Player> toRemove = new ArrayList<Player>();
for (Player p: arrivedPlayers){
if (!p.isOnline()) {
toRemove.add(p);
continue;
}
Location loc = p.getLocation();
if (getPortal(loc) != null)
continue;
toRemove.add(p);
}
arrivedPlayers.removeAll(toRemove);
}
}, 100, 20);
}
public boolean canTransferPlayer(Player p) {
if (BetterShardsPlugin.getCombatTagManager().isInCombatTag(p)){
return false;
} else{
return !arrivedPlayers.contains(p);
}
}
public void addArrivedPlayer(Player p) {
arrivedPlayers.add(p);
}
// We want it sync incase a mercury message comes through we don't want it to override the db before
// mercury gets a chance to update the portal.
private void autoSaveTimer() {
Bukkit.getScheduler().scheduleSyncRepeatingTask(BetterShardsPlugin.getInstance(), new Runnable() {
@Override
public void run() {
Collection<Portal> ports = portals.values();
for (Portal p: ports) {
if (p.isDirty()) {
db.updatePortalData(p);
p.setDirty(false);
}
}
}
}, 500, 1000);
}
private void registerParticleRunnable() {
Bukkit.getScheduler().scheduleSyncRepeatingTask(BetterShardsPlugin.getInstance(), new Runnable() {
@Override
public void run() {
for(Portal portal : portals.values()) {
for(Player p : Bukkit.getOnlinePlayers()) {
portal.showParticles(p);
}
}
}
}, 20L, 20L);
}
}
| Only show particles for portals on the current server
| BetterShardsBukkit/src/main/java/vg/civcraft/mc/bettershards/PortalsManager.java | Only show particles for portals on the current server | <ide><path>BetterShardsBukkit/src/main/java/vg/civcraft/mc/bettershards/PortalsManager.java
<ide> @Override
<ide> public void run() {
<ide> for(Portal portal : portals.values()) {
<add> if (!portal.isOnCurrentServer()) {
<add> continue;
<add> }
<ide> for(Player p : Bukkit.getOnlinePlayers()) {
<ide> portal.showParticles(p);
<ide> } |
|
Java | epl-1.0 | 6643a518937909986000ad20040133fd11fd1594 | 0 | boniatillo-com/PhaserEditor,boniatillo-com/PhaserEditor,boniatillo-com/PhaserEditor,boniatillo-com/PhaserEditor,boniatillo-com/PhaserEditor,boniatillo-com/PhaserEditor | // The MIT License (MIT)
//
// Copyright (c) 2015, 2017 Arian Fornaris
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions: The above copyright notice and this permission
// notice shall be included in all copies or substantial portions of the
// Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
package phasereditor.project.ui.wizards;
import java.util.HashMap;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IWorkspaceRoot;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.resources.WorkspaceJob;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.jface.dialogs.Dialog;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.wizard.Wizard;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.ui.INewWizard;
import org.eclipse.ui.IWorkbench;
import org.eclipse.ui.IWorkingSet;
import org.eclipse.ui.dialogs.WizardNewProjectCreationPage;
import org.json.JSONObject;
import phasereditor.inspect.core.InspectCore;
import phasereditor.inspect.core.templates.TemplateModel;
import phasereditor.project.core.ProjectCore;
import phasereditor.project.core.codegen.SourceLang;
/**
* @author arian
*
*/
public class NewPhaserProjectWizard extends Wizard implements INewWizard {
protected WizardNewProjectCreationPage _projectPage;
protected NewPhaserProjectSettingsWizardPage _settingsPage;
private IStructuredSelection _selection;
private IWorkbench _workbench;
public NewPhaserProjectWizard() {
}
@Override
public void init(IWorkbench workbench, IStructuredSelection selection) {
_workbench = workbench;
_selection = selection;
}
public IStructuredSelection getSelection() {
return _selection;
}
public IWorkbench getWorkbench() {
return _workbench;
}
@Override
public void addPages() {
_projectPage = new WizardNewProjectCreationPage("project") {
@Override
public void createControl(Composite parent) {
super.createControl(parent);
createWorkingSetGroup((Composite) getControl(),
getSelection(),
new String[] { "org.eclipse.ui.resourceWorkingSetPage" });
Dialog.applyDialogFont(getControl());
}
};
_projectPage.setTitle("New Phaser Project");
_projectPage.setDescription("Set the project name.");
{ // set initial name
IWorkspaceRoot root = ResourcesPlugin.getWorkspace().getRoot();
int i = 1;
while (root.getProject("Game" + i).exists()) {
i++;
}
_projectPage.setInitialProjectName("Game" + i);
}
_settingsPage = new NewPhaserProjectSettingsWizardPage();
addPage(_projectPage);
addPage(_settingsPage);
}
@Override
public void createPageControls(Composite pageContainer) {
super.createPageControls(pageContainer);
_settingsPage.setFocus();
}
@Override
public boolean performFinish() {
var params = new HashMap<String, String>();
IProject project = _projectPage.getProjectHandle();
int width = Integer.parseInt(_settingsPage.getWidthText().getText());
int height = Integer.parseInt(_settingsPage.getHeightText().getText());
params.put("title", project.getName());
var config = new JSONObject();
config.put("title", project.getName());
config.put("width", width);
config.put("height", height);
config.put("type", "#!@-" + _settingsPage.getTypeCombo().getText() + "#!@-");
config.put("backgroundColor", "#88F");
config.put("parent", "game-container");
boolean pixelArt = _settingsPage.getPixelArtBtn().getSelection();
if (pixelArt) {
var renderConfig = new JSONObject();
renderConfig.put("pixelArt", pixelArt);
config.put("render", renderConfig);
}
{
var physicsConfig = new JSONObject();
var physics = _settingsPage.getPhysics();
if (physics != null) {
physicsConfig.put("default", physics);
config.put("physics", physicsConfig);
}
}
{
var scaleConfig = new JSONObject();
var scaleMode = _settingsPage.getScaleMode();
if (scaleMode != null) {
scaleConfig.put("mode", "#!@-" + scaleMode + "#!@-");
}
var scaleAutoCenter = _settingsPage.getScaleAutoCenter();
if (scaleAutoCenter != null) {
scaleConfig.put("autoCenter", "#!@-" + scaleAutoCenter + "#!@-");
}
if (!scaleConfig.isEmpty()) {
config.put("scale", scaleConfig);
}
}
{
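			// Strip the "#!@-" placeholder markers together with their surrounding
			// quotes so values such as Phaser enum references end up unquoted in
			// the generated config string.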
String str = config.toString(4);
str = str.replace("\"#!@-", "").replace("#!@-\"", "");
str = str.substring(0, str.length() - 2) + "\n\t}";
params.put("config", str);
}
SourceLang lang = _settingsPage.getSourceLang();
IWorkingSet[] workingSets = _projectPage.getSelectedWorkingSets();
new WorkspaceJob("Creating Phaser Project") {
@Override
public IStatus runInWorkspace(IProgressMonitor monitor) throws CoreException {
monitor.beginTask("Creating project", 4);
project.create(monitor);
monitor.worked(1);
project.open(monitor);
ProjectCore.setProjectSceneSize(project, width, height);
monitor.worked(1);
TemplateModel template;
var templId = "phasereditor.project.simplest";
if (lang == SourceLang.JAVA_SCRIPT_6) {
templId += ".js6";
} else if (lang == SourceLang.TYPE_SCRIPT) {
templId += ".typescript";
}
var workbench = getWorkbench();
template = InspectCore.getProjectTemplates().findById(templId);
ProjectCore.configureNewPhaserProject(project, template, params, lang, monitor);
monitor.worked(1);
workbench.getWorkingSetManager().addToWorkingSets(project, workingSets);
monitor.worked(1);
return Status.OK_STATUS;
}
}.schedule();
return true;
}
@Override
public boolean needsProgressMonitor() {
return true;
}
}
| source/v2/phasereditor/phasereditor.project.ui/src/phasereditor/project/ui/wizards/NewPhaserProjectWizard.java | // The MIT License (MIT)
//
// Copyright (c) 2015, 2017 Arian Fornaris
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions: The above copyright notice and this permission
// notice shall be included in all copies or substantial portions of the
// Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
package phasereditor.project.ui.wizards;
import java.util.HashMap;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IWorkspaceRoot;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.resources.WorkspaceJob;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.jface.dialogs.Dialog;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.wizard.Wizard;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.ui.INewWizard;
import org.eclipse.ui.IWorkbench;
import org.eclipse.ui.IWorkingSet;
import org.eclipse.ui.dialogs.WizardNewProjectCreationPage;
import org.json.JSONObject;
import phasereditor.inspect.core.InspectCore;
import phasereditor.inspect.core.templates.TemplateModel;
import phasereditor.project.core.ProjectCore;
import phasereditor.project.core.codegen.SourceLang;
/**
* @author arian
*
*/
public class NewPhaserProjectWizard extends Wizard implements INewWizard {
protected WizardNewProjectCreationPage _projectPage;
protected NewPhaserProjectSettingsWizardPage _settingsPage;
private IStructuredSelection _selection;
private IWorkbench _workbench;
public NewPhaserProjectWizard() {
}
@Override
public void init(IWorkbench workbench, IStructuredSelection selection) {
_workbench = workbench;
_selection = selection;
}
public IStructuredSelection getSelection() {
return _selection;
}
public IWorkbench getWorkbench() {
return _workbench;
}
@Override
public void addPages() {
_projectPage = new WizardNewProjectCreationPage("project") {
@Override
public void createControl(Composite parent) {
super.createControl(parent);
createWorkingSetGroup((Composite) getControl(),
getSelection(),
new String[] { "org.eclipse.ui.resourceWorkingSetPage" });
Dialog.applyDialogFont(getControl());
}
};
_projectPage.setTitle("New Phaser Project");
_projectPage.setDescription("Set the project name.");
{ // set initial name
IWorkspaceRoot root = ResourcesPlugin.getWorkspace().getRoot();
int i = 1;
while (root.getProject("Game" + i).exists()) {
i++;
}
_projectPage.setInitialProjectName("Game" + i);
}
_settingsPage = new NewPhaserProjectSettingsWizardPage();
addPage(_projectPage);
addPage(_settingsPage);
}
@Override
public void createPageControls(Composite pageContainer) {
super.createPageControls(pageContainer);
_settingsPage.setFocus();
}
@Override
public boolean performFinish() {
var params = new HashMap<String, String>();
IProject project = _projectPage.getProjectHandle();
int width = Integer.parseInt(_settingsPage.getWidthText().getText());
int height = Integer.parseInt(_settingsPage.getHeightText().getText());
params.put("title", project.getName());
boolean pixelArt = _settingsPage.getPixelArtBtn().getSelection();
var renderConfig = new JSONObject();
renderConfig.put("pixelArt", pixelArt);
var config = new JSONObject();
config.put("title", project.getName());
config.put("width", width);
config.put("height", height);
config.put("type", "#!@-" + _settingsPage.getTypeCombo().getText() + "#!@-");
config.put("backgroundColor", "#88F");
config.put("parent", "game-container");
{
var physicsConfig = new JSONObject();
var physics = _settingsPage.getPhysics();
if (physics != null) {
physicsConfig.put("default", physics);
config.put("physics", physicsConfig);
}
}
{
var scaleConfig = new JSONObject();
var scaleMode = _settingsPage.getScaleMode();
if (scaleMode != null) {
scaleConfig.put("mode", "#!@-" + scaleMode + "#!@-");
}
var scaleAutoCenter = _settingsPage.getScaleAutoCenter();
if (scaleAutoCenter != null) {
scaleConfig.put("autoCenter", "#!@-" + scaleAutoCenter + "#!@-");
}
if (!scaleConfig.isEmpty()) {
config.put("scale", scaleConfig);
}
}
{
String str = config.toString(4);
str = str.replace("\"#!@-", "").replace("#!@-\"", "");
str = str.substring(0, str.length() - 2) + "\n\t}";
params.put("config", str);
}
SourceLang lang = _settingsPage.getSourceLang();
IWorkingSet[] workingSets = _projectPage.getSelectedWorkingSets();
new WorkspaceJob("Creating Phaser Project") {
@Override
public IStatus runInWorkspace(IProgressMonitor monitor) throws CoreException {
monitor.beginTask("Creating project", 4);
project.create(monitor);
monitor.worked(1);
project.open(monitor);
ProjectCore.setProjectSceneSize(project, width, height);
monitor.worked(1);
TemplateModel template;
var templId = "phasereditor.project.simplest";
if (lang == SourceLang.JAVA_SCRIPT_6) {
templId += ".js6";
} else if (lang == SourceLang.TYPE_SCRIPT) {
templId += ".typescript";
}
var workbench = getWorkbench();
template = InspectCore.getProjectTemplates().findById(templId);
ProjectCore.configureNewPhaserProject(project, template, params, lang, monitor);
monitor.worked(1);
workbench.getWorkingSetManager().addToWorkingSets(project, workingSets);
monitor.worked(1);
return Status.OK_STATUS;
}
}.schedule();
return true;
}
@Override
public boolean needsProgressMonitor() {
return true;
}
}
| #115: New project wizard includes "pixelArt" config. | source/v2/phasereditor/phasereditor.project.ui/src/phasereditor/project/ui/wizards/NewPhaserProjectWizard.java | #115: New project wizard includes "pixelArt" config. | <ide><path>source/v2/phasereditor/phasereditor.project.ui/src/phasereditor/project/ui/wizards/NewPhaserProjectWizard.java
<ide>
<ide> params.put("title", project.getName());
<ide>
<del> boolean pixelArt = _settingsPage.getPixelArtBtn().getSelection();
<del>
<del> var renderConfig = new JSONObject();
<del> renderConfig.put("pixelArt", pixelArt);
<del>
<ide> var config = new JSONObject();
<ide>
<ide> config.put("title", project.getName());
<ide> config.put("backgroundColor", "#88F");
<ide> config.put("parent", "game-container");
<ide>
<add> boolean pixelArt = _settingsPage.getPixelArtBtn().getSelection();
<add> if (pixelArt) {
<add> var renderConfig = new JSONObject();
<add> renderConfig.put("pixelArt", pixelArt);
<add>
<add> config.put("render", renderConfig);
<add> }
<add>
<ide> {
<ide> var physicsConfig = new JSONObject();
<ide> var physics = _settingsPage.getPhysics(); |
|
Java | apache-2.0 | 2dbafbb91003a6054ca973c7a02aa8f10cc5d72d | 0 | phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida | package ca.corefacility.bioinformatics.irida.service.analysis.workspace.galaxy.impl.integration;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.security.test.context.support.WithMockUser;
import org.springframework.security.test.context.support.WithSecurityContextTestExcecutionListener;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.TestExecutionListeners;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.support.AnnotationConfigContextLoader;
import org.springframework.test.context.support.DependencyInjectionTestExecutionListener;
import com.github.jmchilton.blend4j.galaxy.GalaxyInstance;
import com.github.jmchilton.blend4j.galaxy.HistoriesClient;
import com.github.jmchilton.blend4j.galaxy.LibrariesClient;
import com.github.jmchilton.blend4j.galaxy.ToolsClient;
import com.github.jmchilton.blend4j.galaxy.WorkflowsClient;
import com.github.jmchilton.blend4j.galaxy.beans.History;
import com.github.jmchilton.blend4j.galaxy.beans.HistoryContents;
import com.github.jmchilton.blend4j.galaxy.beans.LibraryContent;
import com.github.jmchilton.blend4j.galaxy.beans.Workflow;
import com.github.jmchilton.blend4j.galaxy.beans.WorkflowInputs;
import com.github.jmchilton.blend4j.galaxy.beans.WorkflowInputs.WorkflowInput;
import com.github.springtestdbunit.DbUnitTestExecutionListener;
import com.github.springtestdbunit.annotation.DatabaseSetup;
import com.github.springtestdbunit.annotation.DatabaseTearDown;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import ca.corefacility.bioinformatics.irida.config.IridaApiGalaxyTestConfig;
import ca.corefacility.bioinformatics.irida.config.conditions.WindowsPlatformCondition;
import ca.corefacility.bioinformatics.irida.exceptions.DuplicateSampleException;
import ca.corefacility.bioinformatics.irida.exceptions.ExecutionManagerException;
import ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowAnalysisTypeException;
import ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowException;
import ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowLoadException;
import ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowNotFoundException;
import ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowParameterException;
import ca.corefacility.bioinformatics.irida.exceptions.SampleAnalysisDuplicateException;
import ca.corefacility.bioinformatics.irida.exceptions.WorkflowException;
import ca.corefacility.bioinformatics.irida.exceptions.galaxy.GalaxyDatasetNotFoundException;
import ca.corefacility.bioinformatics.irida.model.enums.AnalysisState;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFile;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFilePair;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequencingObject;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SingleEndSequenceFile;
import ca.corefacility.bioinformatics.irida.model.workflow.IridaWorkflow;
import ca.corefacility.bioinformatics.irida.model.workflow.analysis.Analysis;
import ca.corefacility.bioinformatics.irida.model.workflow.analysis.AnalysisPhylogenomicsPipeline;
import ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowParameter;
import ca.corefacility.bioinformatics.irida.model.workflow.execution.galaxy.PreparedWorkflowGalaxy;
import ca.corefacility.bioinformatics.irida.model.workflow.execution.galaxy.WorkflowInputsGalaxy;
import ca.corefacility.bioinformatics.irida.model.workflow.submission.AnalysisSubmission;
import ca.corefacility.bioinformatics.irida.pipeline.upload.galaxy.GalaxyHistoriesService;
import ca.corefacility.bioinformatics.irida.pipeline.upload.galaxy.GalaxyLibrariesService;
import ca.corefacility.bioinformatics.irida.pipeline.upload.galaxy.integration.LocalGalaxy;
import ca.corefacility.bioinformatics.irida.pipeline.upload.galaxy.integration.Util;
import ca.corefacility.bioinformatics.irida.repositories.analysis.submission.AnalysisSubmissionRepository;
import ca.corefacility.bioinformatics.irida.repositories.sample.SampleRepository;
import ca.corefacility.bioinformatics.irida.service.DatabaseSetupGalaxyITService;
import ca.corefacility.bioinformatics.irida.service.SequencingObjectService;
import ca.corefacility.bioinformatics.irida.service.analysis.workspace.galaxy.AnalysisWorkspaceServiceGalaxy;
import ca.corefacility.bioinformatics.irida.service.sample.SampleService;
import ca.corefacility.bioinformatics.irida.service.workflow.IridaWorkflowsService;
/**
* Tests out preparing a workspace for execution of workflows in Galaxy.
*
*
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(loader = AnnotationConfigContextLoader.class, classes = { IridaApiGalaxyTestConfig.class })
@ActiveProfiles("test")
@TestExecutionListeners({ DependencyInjectionTestExecutionListener.class, DbUnitTestExecutionListener.class,
WithSecurityContextTestExcecutionListener.class })
@DatabaseSetup("/ca/corefacility/bioinformatics/irida/repositories/analysis/AnalysisRepositoryIT.xml")
@DatabaseTearDown("/ca/corefacility/bioinformatics/irida/test/integration/TableReset.xml")
public class AnalysisWorkspaceServiceGalaxyIT {
@Autowired
private DatabaseSetupGalaxyITService analysisExecutionGalaxyITService;
@Autowired
private LocalGalaxy localGalaxy;
@Autowired
private AnalysisWorkspaceServiceGalaxy analysisWorkspaceService;
@Autowired
private IridaWorkflowsService iridaWorkflowsService;
@Autowired
private AnalysisSubmissionRepository analysisSubmissionRepository;
@Autowired
private SampleService sampleService;
@Autowired
private SampleRepository sampleRepository;
@Autowired
private SequencingObjectService sequencingObjectService;
@Autowired
@Qualifier("rootTempDirectory")
private Path rootTempDirectory;
private GalaxyHistoriesService galaxyHistoriesService;
/**
* Timeout in seconds to stop polling a Galaxy library.
*/
private static final int LIBRARY_TIMEOUT = 5 * 60;
	/**
	 * Interval in seconds between polls of a Galaxy library to check whether
	 * datasets have been properly uploaded.
	 */
private static final int LIBRARY_POLLING_TIME = 5;
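	// With the values above, a library upload is polled at most
	// LIBRARY_TIMEOUT / LIBRARY_POLLING_TIME = 60 times before polling stops.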
private Path sequenceFilePathA;
private Path sequenceFilePath2A;
private Path sequenceFilePathB;
private Path sequenceFilePath2B;
private Path sequenceFilePath3;
private Path referenceFilePath;
private List<Path> pairSequenceFiles1A;
private List<Path> pairSequenceFiles2A;
private List<Path> pairSequenceFiles1AB;
private List<Path> pairSequenceFiles2AB;
private Set<SequencingObject> singleFileSet;
private static final UUID validWorkflowIdSingle = UUID.fromString("739f29ea-ae82-48b9-8914-3d2931405db6");
private static final UUID validWorkflowIdSingleSingleSample = UUID.fromString("a9692a52-5bc6-4da2-a89d-d880bb35bfe4");
private static final UUID validWorkflowIdPaired = UUID.fromString("ec93b50d-c9dd-4000-98fc-4a70d46ddd36");
private static final UUID validWorkflowIdPairedSingleSample = UUID.fromString("fc93b50d-c9dd-4000-98fc-4a70d46ddd36");
private static final UUID validWorkflowIdPairedWithParameters = UUID.fromString("23434bf8-e551-4efd-9957-e61c6f649f8b");
private static final UUID validWorkflowIdSinglePaired = UUID.fromString("d92e9918-1e3d-4dea-b2b9-089f1256ac1b");
private static final UUID phylogenomicsWorkflowId = UUID.fromString("1f9ea289-5053-4e4a-bc76-1f0c60b179f8");
private static final String OUTPUT1_KEY = "output1";
private static final String OUTPUT2_KEY = "output2";
private static final String OUTPUT1_NAME = "output1.txt";
private static final String OUTPUT2_NAME = "output2.txt";
private static final String MATRIX_NAME = "snpMatrix.tsv";
private static final String MATRIX_KEY = "matrix";
private static final String TREE_NAME = "phylogeneticTree.txt";
private static final String TREE_KEY = "tree";
private static final String TABLE_NAME = "snpTable.tsv";
private static final String TABLE_KEY = "table";
private static final String INPUTS_SINGLE_NAME = "irida_sequence_files_single";
private static final String INPUTS_PAIRED_NAME = "irida_sequence_files_paired";
private static final String SAMPLE1_NAME = "sample1";
/**
* Sets up variables for testing.
*
* @throws URISyntaxException
* @throws IOException
* @throws IridaWorkflowLoadException
*/
@Before
public void setup() throws URISyntaxException, IOException, IridaWorkflowLoadException {
Assume.assumeFalse(WindowsPlatformCondition.isWindows());
Path sequenceFilePathReal = Paths
.get(DatabaseSetupGalaxyITService.class.getResource("testData1.fastq").toURI());
Path referenceFilePathReal = Paths.get(DatabaseSetupGalaxyITService.class.getResource("testReference.fasta")
.toURI());
Path tempDir = Files.createTempDirectory(rootTempDirectory, "workspaceServiceGalaxyTest");
sequenceFilePathA = tempDir.resolve("testDataA_R1_001.fastq");
Files.copy(sequenceFilePathReal, sequenceFilePathA, StandardCopyOption.REPLACE_EXISTING);
sequenceFilePath2A = tempDir.resolve("testDataA_R2_001.fastq");
Files.copy(sequenceFilePathReal, sequenceFilePath2A, StandardCopyOption.REPLACE_EXISTING);
sequenceFilePathB = tempDir.resolve("testDataB_R1_001.fastq");
Files.copy(sequenceFilePathReal, sequenceFilePathB, StandardCopyOption.REPLACE_EXISTING);
sequenceFilePath2B = tempDir.resolve("testDataB_R2_001.fastq");
Files.copy(sequenceFilePathReal, sequenceFilePath2B, StandardCopyOption.REPLACE_EXISTING);
sequenceFilePath3 = tempDir.resolve("testData3_R1_001.fastq");
Files.copy(sequenceFilePathReal, sequenceFilePath3, StandardCopyOption.REPLACE_EXISTING);
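		// reserve a unique temp path for the reference file, then delete the
		// empty placeholder so Files.copy can recreate it with the real content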
referenceFilePath = Files.createTempFile("testReference", ".fasta");
Files.delete(referenceFilePath);
Files.copy(referenceFilePathReal, referenceFilePath);
singleFileSet = Sets.newHashSet(new SingleEndSequenceFile(new SequenceFile(sequenceFilePathA)));
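		// build a GalaxyHistoriesService against the admin Galaxy instance so
		// tests can poll histories for completion of uploaded datasets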
GalaxyInstance galaxyInstanceAdmin = localGalaxy.getGalaxyInstanceAdmin();
HistoriesClient historiesClient = galaxyInstanceAdmin.getHistoriesClient();
ToolsClient toolsClient = galaxyInstanceAdmin.getToolsClient();
LibrariesClient librariesClient = galaxyInstanceAdmin.getLibrariesClient();
GalaxyLibrariesService galaxyLibrariesService = new GalaxyLibrariesService(librariesClient, LIBRARY_POLLING_TIME, LIBRARY_TIMEOUT, 1);
galaxyHistoriesService = new GalaxyHistoriesService(historiesClient, toolsClient, galaxyLibrariesService);
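		// paired-end inputs: the "1" lists hold forward reads and the "2" lists
		// hold reverse reads; the A/B suffixes distinguish the two file pairs
		// used by the duplicate-sample tests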
pairSequenceFiles1A = new ArrayList<>();
pairSequenceFiles1A.add(sequenceFilePathA);
pairSequenceFiles2A = new ArrayList<>();
pairSequenceFiles2A.add(sequenceFilePath2A);
pairSequenceFiles1AB = new ArrayList<>();
pairSequenceFiles1AB.add(sequenceFilePathA);
pairSequenceFiles1AB.add(sequenceFilePathB);
pairSequenceFiles2AB = new ArrayList<>();
pairSequenceFiles2AB.add(sequenceFilePath2A);
pairSequenceFiles2AB.add(sequenceFilePath2B);
}
/**
* Tests successfully preparing a workspace for analysis.
*
* @throws IridaWorkflowNotFoundException
* @throws ExecutionManagerException
*/
@Test
public void testPrepareAnalysisWorkspaceSuccess() throws IridaWorkflowNotFoundException, ExecutionManagerException {
AnalysisSubmission submission = AnalysisSubmission.builder(validWorkflowIdSingle)
.name("Name")
.inputFiles(singleFileSet)
.build();
assertNotNull("preparing an analysis workspace should not return null",
analysisWorkspaceService.prepareAnalysisWorkspace(submission));
}
/**
* Tests failure to prepare a workspace for analysis.
*
* @throws IridaWorkflowNotFoundException
* @throws ExecutionManagerException
*/
@Test(expected = IllegalArgumentException.class)
public void testPrepareAnalysisWorkspaceFail() throws IridaWorkflowNotFoundException, ExecutionManagerException {
AnalysisSubmission submission = AnalysisSubmission.builder(validWorkflowIdSingle)
.name("Name")
.inputFiles(singleFileSet)
.build();
submission.setRemoteAnalysisId("1");
analysisWorkspaceService.prepareAnalysisWorkspace(submission);
}
/**
* Tests out successfully preparing single workflow input files for
* execution.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesSingleSuccess() throws InterruptedException, ExecutionManagerException,
IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesSingleSuccess");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
LibrariesClient librariesClient = localGalaxy.getGalaxyInstanceAdmin().getLibrariesClient();
History createdHistory = historiesClient.create(history);
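		// import the workflow definition from the IRIDA workflow description
		// into Galaxy so the submission can reference its remote workflow id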
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdSingle);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupSubmissionInDatabase(1L,
sequenceFilePathA, referenceFilePath, validWorkflowIdSingle);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
PreparedWorkflowGalaxy preparedWorkflow = analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
assertEquals("the response history id should match the input history id", createdHistory.getId(),
preparedWorkflow.getRemoteAnalysisId());
assertNotNull("the returned workflow inputs should not be null", preparedWorkflow.getWorkflowInputs());
assertNotNull("the returned library id should not be null", preparedWorkflow.getRemoteDataId());
// verify correct library is created
List<LibraryContent> libraryContents = librariesClient.getLibraryContents(preparedWorkflow.getRemoteDataId());
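		// index the library contents by name; the library should contain the
		// root folder "/" plus one dataset per uploaded sequence file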
Map<String, List<LibraryContent>> libraryContentsMap = libraryContents.stream().collect(Collectors.groupingBy(LibraryContent::getName));
assertFalse("the returned library should exist in Galaxy", libraryContentsMap.isEmpty());
String sequenceFileALibraryName = "/" + sequenceFilePathA.getFileName().toString();
assertEquals("the returned library does not contain the correct number of elements", 2,
libraryContentsMap.size());
assertTrue("the returned library does not contain a root folder", libraryContentsMap.containsKey("/"));
assertTrue("the returned library does not contain the correct sequence file",
libraryContentsMap.containsKey(sequenceFileALibraryName));
assertEquals("the returned library does not contain the correct sequence file", 1,
libraryContentsMap.get(sequenceFileALibraryName).size());
// verify correct files have been uploaded
List<HistoryContents> historyContents = historiesClient.showHistoryContents(createdHistory.getId());
assertEquals("the created history should contain 3 entries", 3, historyContents.size());
Map<String, HistoryContents> contentsMap = historyContentsAsMap(historyContents);
assertTrue("the created history should contain the file " + sequenceFilePathA.toFile().getName(),
contentsMap.containsKey(sequenceFilePathA.toFile().getName()));
assertTrue("the created history should contain the file " + referenceFilePath.toFile().getName(),
contentsMap.containsKey(referenceFilePath.toFile().getName()));
assertTrue("the created history should contain the collection with name " + INPUTS_SINGLE_NAME,
contentsMap.containsKey(INPUTS_SINGLE_NAME));
// make sure workflow inputs contains correct information
Map<String, WorkflowInput> workflowInputsMap = preparedWorkflow.getWorkflowInputs().getInputsObject()
.getInputs();
assertEquals("the created workflow inputs has an invalid number of elements", 2, workflowInputsMap.size());
}
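	/**
	 * Indexes history contents by dataset name. Maps.uniqueIndex will fail if
	 * Galaxy returns two datasets with the same name.
	 */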
private Map<String, HistoryContents> historyContentsAsMap(List<HistoryContents> historyContents) {
		return Maps.uniqueIndex(historyContents, HistoryContents::getName);
}
/**
* Tests out failing to prepare single workflow input files for execution
* (duplicate samples).
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test(expected = DuplicateSampleException.class)
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesSingleFail() throws InterruptedException, ExecutionManagerException,
IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesSingleFail");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
History createdHistory = historiesClient.create(history);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdSingle);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
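		// register two single-end files under the same sample (1L), which
		// should trigger the expected DuplicateSampleException on preparation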
List<SingleEndSequenceFile> sequenceFiles = analysisExecutionGalaxyITService.setupSequencingObjectInDatabase(
1L, sequenceFilePathA, sequenceFilePath2A);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupSubmissionInDatabase(1L,
Sets.newHashSet(sequenceFiles), referenceFilePath, validWorkflowIdSingle);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
}
/**
* Tests out successfully preparing paired workflow input files for
* execution.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesPairSuccess() throws InterruptedException, ExecutionManagerException,
IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesPairSuccess");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
LibrariesClient librariesClient = localGalaxy.getGalaxyInstanceAdmin().getLibrariesClient();
History createdHistory = historiesClient.create(history);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdPaired);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L,
pairSequenceFiles1A, pairSequenceFiles2A, referenceFilePath, validWorkflowIdPaired);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
PreparedWorkflowGalaxy preparedWorkflow = analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
assertEquals("the response history id should match the input history id", createdHistory.getId(),
preparedWorkflow.getRemoteAnalysisId());
WorkflowInputsGalaxy workflowInputsGalaxy = preparedWorkflow.getWorkflowInputs();
assertNotNull("the returned workflow inputs should not be null", workflowInputsGalaxy);
assertNotNull("the returned library id should not be null", preparedWorkflow.getRemoteDataId());
// verify correct library is created
List<LibraryContent> libraryContents = librariesClient.getLibraryContents(preparedWorkflow.getRemoteDataId());
Map<String, List<LibraryContent>> libraryContentsMap = libraryContents.stream().collect(Collectors.groupingBy(LibraryContent::getName));
assertFalse("the returned library should exist in Galaxy", libraryContentsMap.isEmpty());
String sequenceFile1ALibraryName = "/" + sequenceFilePathA.getFileName().toString();
String sequenceFile2ALibraryName = "/" + sequenceFilePath2A.getFileName().toString();
assertEquals("the returned library does not contain the correct number of elements", 3,
libraryContentsMap.size());
assertTrue("the returned library does not contain a root folder", libraryContentsMap.containsKey("/"));
assertTrue("the returned library does not contain the correct sequence file",
libraryContentsMap.containsKey(sequenceFile1ALibraryName));
assertEquals("the returned library does not contain the correct sequence file", 1,
libraryContentsMap.get(sequenceFile1ALibraryName).size());
assertTrue("the returned library does not contain the correct sequence file",
libraryContentsMap.containsKey(sequenceFile2ALibraryName));
assertEquals("the returned library does not contain the correct sequence file", 1,
libraryContentsMap.get(sequenceFile2ALibraryName).size());
// verify correct files have been uploaded
List<HistoryContents> historyContents = historiesClient.showHistoryContents(createdHistory.getId());
assertEquals("the created history has an invalid number of elements", 4, historyContents.size());
Map<String, HistoryContents> contentsMap = historyContentsAsMap(historyContents);
assertTrue("the created history should contain the file " + sequenceFilePathA.toFile().getName(),
contentsMap.containsKey(sequenceFilePathA.toFile().getName()));
assertTrue("the created history should contain the file " + sequenceFilePath2A.toFile().getName(),
contentsMap.containsKey(sequenceFilePath2A.toFile().getName()));
assertTrue("the created history should contain the file " + referenceFilePath.toFile().getName(),
contentsMap.containsKey(referenceFilePath.toFile().getName()));
assertTrue("the created history should contain the collection with name " + INPUTS_PAIRED_NAME,
contentsMap.containsKey(INPUTS_PAIRED_NAME));
// make sure workflow inputs contains correct information
Map<String, WorkflowInput> workflowInputsMap = preparedWorkflow.getWorkflowInputs().getInputsObject()
.getInputs();
assertEquals("the created workflow inputs has an invalid number of elements", 2, workflowInputsMap.size());
}
/**
* Tests out successfully preparing paired workflow input files for
* execution with parameters.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesParametersSuccess() throws InterruptedException, ExecutionManagerException,
IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesParametersSuccess");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
History createdHistory = historiesClient.create(history);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdPairedWithParameters);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
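		// override the single IRIDA "coverage" parameter; it is expected to fan
		// out to the coverageMin/coverageMid/coverageMax tool parameters below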
Map<String, String> parameters = ImmutableMap.of("coverage", "20");
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L,
pairSequenceFiles1A, pairSequenceFiles2A, referenceFilePath, parameters,
validWorkflowIdPairedWithParameters);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
PreparedWorkflowGalaxy preparedWorkflow = analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
assertEquals("the response history id should match the input history id", createdHistory.getId(),
preparedWorkflow.getRemoteAnalysisId());
WorkflowInputsGalaxy workflowInputsGalaxy = preparedWorkflow.getWorkflowInputs();
assertNotNull("the returned workflow inputs should not be null", workflowInputsGalaxy);
assertNotNull("the returned library id should not be null", preparedWorkflow.getRemoteDataId());
// verify correct files have been uploaded
List<HistoryContents> historyContents = historiesClient.showHistoryContents(createdHistory.getId());
assertEquals("the created history has an invalid number of elements", 4, historyContents.size());
WorkflowInputs workflowInputs = preparedWorkflow.getWorkflowInputs().getInputsObject();
assertNotNull("created workflowInputs is null", workflowInputs);
Map<String, Object> toolParameters = workflowInputs.getParameters().get(
"core_pipeline_outputs_paired_with_parameters");
assertNotNull("toolParameters is null", toolParameters);
String coverageMinValue = (String) toolParameters.get("coverageMin");
assertEquals("coverageMinValue should have been changed", "20", coverageMinValue);
assertEquals("coverageMidValue should have been changed",
ImmutableMap.of("coverageMid", "20"), toolParameters.get("conditional"));
		String coverageMaxValue = (String) toolParameters.get("coverageMax");
		assertEquals("coverageMaxValue should have been changed", "20", coverageMaxValue);
}
/**
* Tests out successfully preparing paired workflow input files for
* execution, no parameters set.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesParametersSuccessWithNoParameters() throws InterruptedException,
ExecutionManagerException, IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesParametersSuccessWithNoParameters");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
History createdHistory = historiesClient.create(history);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdPairedWithParameters);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L,
pairSequenceFiles1A, pairSequenceFiles2A, referenceFilePath, validWorkflowIdPairedWithParameters);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
PreparedWorkflowGalaxy preparedWorkflow = analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
assertEquals("the response history id should match the input history id", createdHistory.getId(),
preparedWorkflow.getRemoteAnalysisId());
WorkflowInputsGalaxy workflowInputsGalaxy = preparedWorkflow.getWorkflowInputs();
assertNotNull("the returned workflow inputs should not be null", workflowInputsGalaxy);
assertNotNull("the returned library id should not be null", preparedWorkflow.getRemoteDataId());
// verify correct files have been uploaded
List<HistoryContents> historyContents = historiesClient.showHistoryContents(createdHistory.getId());
assertEquals("the created history has an invalid number of elements", 4, historyContents.size());
WorkflowInputs workflowInputs = preparedWorkflow.getWorkflowInputs().getInputsObject();
assertNotNull("created workflowInputs is null", workflowInputs);
Map<String, Object> toolParameters = workflowInputs.getParameters().get(
"core_pipeline_outputs_paired_with_parameters");
assertNotNull("toolParameters is null", toolParameters);
String coverageMinValue = (String) toolParameters.get("coverageMin");
assertEquals("coverageMinValue should have been changed to default", "10", coverageMinValue);
assertEquals("coverageMidValue should have been changed to default",
ImmutableMap.of("coverageMid", "10"), toolParameters.get("conditional"));
		String coverageMaxValue = (String) toolParameters.get("coverageMax");
		assertEquals("coverageMaxValue should have been changed to default", "10", coverageMaxValue);
}
/**
* Tests out successfully preparing paired workflow input files for
* execution and ignoring default parameters.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesParametersSuccessIgnoreDefaultParameters() throws InterruptedException,
ExecutionManagerException, IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesParametersSuccessIgnoreDefaultParameters");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
History createdHistory = historiesClient.create(history);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdPairedWithParameters);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
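		// IGNORE_DEFAULT_VALUE signals that this parameter should be left out
		// entirely, so no tool parameters are expected in the prepared inputs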
Map<String, String> parameters = ImmutableMap.of("coverage", IridaWorkflowParameter.IGNORE_DEFAULT_VALUE);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L,
pairSequenceFiles1A, pairSequenceFiles2A, referenceFilePath, parameters, validWorkflowIdPairedWithParameters);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
PreparedWorkflowGalaxy preparedWorkflow = analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
assertEquals("the response history id should match the input history id", createdHistory.getId(),
preparedWorkflow.getRemoteAnalysisId());
WorkflowInputsGalaxy workflowInputsGalaxy = preparedWorkflow.getWorkflowInputs();
assertNotNull("the returned workflow inputs should not be null", workflowInputsGalaxy);
assertNotNull("the returned library id should not be null", preparedWorkflow.getRemoteDataId());
// verify correct files have been uploaded
List<HistoryContents> historyContents = historiesClient.showHistoryContents(createdHistory.getId());
assertEquals("the created history has an invalid number of elements", 4, historyContents.size());
WorkflowInputs workflowInputs = preparedWorkflow.getWorkflowInputs().getInputsObject();
assertNotNull("created workflowInputs is null", workflowInputs);
Map<String, Object> toolParameters = workflowInputs.getParameters().get(
"core_pipeline_outputs_paired_with_parameters");
assertNull("toolParameters is not null", toolParameters);
}
/**
* Tests out failing to prepare paired workflow input files for execution
* with parameters due to an invalid parameter passed.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test(expected = IridaWorkflowParameterException.class)
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesParametersFailInvalidParameter() throws InterruptedException,
ExecutionManagerException, IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesParametersFailInvalidParameter");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
History createdHistory = historiesClient.create(history);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdPairedWithParameters);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
Map<String, String> parameters = ImmutableMap.of("invalid", "20");
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L,
pairSequenceFiles1A, pairSequenceFiles2A, referenceFilePath, parameters,
validWorkflowIdPairedWithParameters);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
}
/**
* Tests out failing to prepare paired workflow input files for execution
* (duplicate sample).
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test(expected = DuplicateSampleException.class)
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesPairFail() throws InterruptedException, ExecutionManagerException,
IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesPairFail");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
History createdHistory = historiesClient.create(history);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdPaired);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
// construct two pairs of sequence files with same sample (1L)
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L,
pairSequenceFiles1AB, pairSequenceFiles2AB, referenceFilePath, validWorkflowIdPaired);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
}
/**
* Tests out successfully preparing paired and single workflow input files
* for execution.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesSinglePairSuccess() throws InterruptedException, ExecutionManagerException,
IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesPairSuccess");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
History createdHistory = historiesClient.create(history);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdSinglePaired);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService
.setupSinglePairSubmissionInDatabaseDifferentSample(1L, 2L, pairSequenceFiles1A, pairSequenceFiles2A,
sequenceFilePath3, referenceFilePath, validWorkflowIdSinglePaired);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
PreparedWorkflowGalaxy preparedWorkflow = analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
assertEquals("the response history id should match the input history id", createdHistory.getId(),
preparedWorkflow.getRemoteAnalysisId());
WorkflowInputsGalaxy workflowInputsGalaxy = preparedWorkflow.getWorkflowInputs();
assertNotNull("the returned workflow inputs should not be null", workflowInputsGalaxy);
// verify correct files have been uploaded
List<HistoryContents> historyContents = historiesClient.showHistoryContents(createdHistory.getId());
assertEquals("the created history has an invalid number of elements", 6, historyContents.size());
Map<String, HistoryContents> contentsMap = historyContentsAsMap(historyContents);
assertTrue("the created history should contain the file " + sequenceFilePathA.toFile().getName(),
contentsMap.containsKey(sequenceFilePathA.toFile().getName()));
assertTrue("the created history should contain the file " + sequenceFilePath2A.toFile().getName(),
contentsMap.containsKey(sequenceFilePath2A.toFile().getName()));
assertTrue("the created history should contain the file " + sequenceFilePath3.toFile().getName(),
contentsMap.containsKey(sequenceFilePath3.toFile().getName()));
assertTrue("the created history should contain the file " + referenceFilePath.toFile().getName(),
contentsMap.containsKey(referenceFilePath.toFile().getName()));
assertTrue("the created history should contain a dataset collection with the name " + INPUTS_SINGLE_NAME,
contentsMap.containsKey(INPUTS_SINGLE_NAME));
assertTrue("the created history should contain a dataset collection with the name " + INPUTS_PAIRED_NAME,
contentsMap.containsKey(INPUTS_PAIRED_NAME));
// make sure workflow inputs contains correct information
Map<String, WorkflowInput> workflowInputsMap = preparedWorkflow.getWorkflowInputs().getInputsObject()
.getInputs();
assertEquals("the created workflow inputs has an invalid number of elements", 3, workflowInputsMap.size());
}
/**
* Tests out failing to prepare paired and single workflow input files for
* execution (duplicate samples among single and paired input files).
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test(expected = SampleAnalysisDuplicateException.class)
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesSinglePairFail() throws InterruptedException, ExecutionManagerException,
IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesSinglePairFail");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
History createdHistory = historiesClient.create(history);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdSinglePaired);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService
.setupSinglePairSubmissionInDatabaseSameSample(1L, pairSequenceFiles1A, pairSequenceFiles2A,
sequenceFilePath3, referenceFilePath, validWorkflowIdSinglePaired);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
}
/**
* Tests out failure to prepare workflow input files for execution.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test(expected = WorkflowException.class)
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesFail() throws InterruptedException, ExecutionManagerException,
IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesFail");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
History createdHistory = historiesClient.create(history);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupSubmissionInDatabase(1L,
sequenceFilePathA, referenceFilePath, validWorkflowIdSingle);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId("invalid");
analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
}
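	/**
	 * Uploads a local file into the given Galaxy history under the given
	 * dataset name, used to fake workflow outputs for the results tests.
	 */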
private void uploadFileToHistory(Path filePath, String fileName, String historyId, ToolsClient toolsClient) {
ToolsClient.FileUploadRequest uploadRequest = new ToolsClient.FileUploadRequest(historyId, filePath.toFile());
uploadRequest.setDatasetName(fileName);
toolsClient.upload(uploadRequest);
}
/**
* Tests out successfully getting results for an analysis (TestAnalysis)
* consisting only of single end sequence reads.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IridaWorkflowNotFoundException
* @throws IOException
* @throws IridaWorkflowAnalysisTypeException
* @throws TimeoutException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testGetAnalysisResultsTestAnalysisSingleSuccess() throws InterruptedException,
ExecutionManagerException, IridaWorkflowNotFoundException, IOException, IridaWorkflowAnalysisTypeException,
TimeoutException {
History history = new History();
history.setName("testGetAnalysisResultsTestAnalysisSingleSuccess");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
ToolsClient toolsClient = localGalaxy.getGalaxyInstanceAdmin().getToolsClient();
History createdHistory = historiesClient.create(history);
// upload test outputs
uploadFileToHistory(sequenceFilePathA, OUTPUT1_NAME, createdHistory.getId(), toolsClient);
uploadFileToHistory(sequenceFilePathA, OUTPUT2_NAME, createdHistory.getId(), toolsClient);
// wait for history
Util.waitUntilHistoryComplete(createdHistory.getId(), galaxyHistoriesService, 60);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdSingle);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupSubmissionInDatabase(1L,
sequenceFilePathA, referenceFilePath, validWorkflowIdSingle);
Set<SingleEndSequenceFile> submittedSf = sequencingObjectService
.getSequencingObjectsOfTypeForAnalysisSubmission(analysisSubmission, SingleEndSequenceFile.class);
Set<SequenceFilePair> pairedFiles = sequencingObjectService
.getSequencingObjectsOfTypeForAnalysisSubmission(analysisSubmission, SequenceFilePair.class);
assertEquals("the created submission should have no paired input files", 0, pairedFiles.size());
assertEquals("the created submission should have 1 single input file", 1, submittedSf.size());
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
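		// mark the submission as COMPLETING so that analysis results can be
		// gathered from the (faked) completed history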
analysisSubmission.setAnalysisState(AnalysisState.COMPLETING);
analysisSubmissionRepository.save(analysisSubmission);
Analysis analysis = analysisWorkspaceService.getAnalysisResults(analysisSubmission);
assertNotNull("the analysis results were not properly created", analysis);
assertEquals("the Analysis results class is invalid", Analysis.class, analysis.getClass());
assertEquals("the analysis results has an invalid number of output files", 2, analysis.getAnalysisOutputFiles()
.size());
assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT1_NAME), analysis
.getAnalysisOutputFile(OUTPUT1_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", OUTPUT1_NAME, analysis
.getAnalysisOutputFile(OUTPUT1_KEY).getLabel());
assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT2_NAME), analysis
.getAnalysisOutputFile(OUTPUT2_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", OUTPUT2_NAME, analysis
.getAnalysisOutputFile(OUTPUT2_KEY).getLabel());
}
/**
* Tests out successfully getting results for an analysis (TestAnalysis)
* consisting only of single end sequence reads (for workflow accepting single sample).
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IridaWorkflowNotFoundException
* @throws IOException
* @throws IridaWorkflowAnalysisTypeException
* @throws TimeoutException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testGetAnalysisResultsTestAnalysisSingleSingleSampleSuccess() throws InterruptedException,
ExecutionManagerException, IridaWorkflowNotFoundException, IOException, IridaWorkflowAnalysisTypeException,
TimeoutException {
History history = new History();
history.setName("testGetAnalysisResultsTestAnalysisSingleSuccess");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
ToolsClient toolsClient = localGalaxy.getGalaxyInstanceAdmin().getToolsClient();
History createdHistory = historiesClient.create(history);
// upload test outputs
uploadFileToHistory(sequenceFilePathA, OUTPUT1_NAME, createdHistory.getId(), toolsClient);
uploadFileToHistory(sequenceFilePathA, OUTPUT2_NAME, createdHistory.getId(), toolsClient);
// wait for history
Util.waitUntilHistoryComplete(createdHistory.getId(), galaxyHistoriesService, 60);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdSingleSingleSample);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupSubmissionInDatabase(1L,
sequenceFilePathA, referenceFilePath, validWorkflowIdSingleSingleSample);
Set<SingleEndSequenceFile> submittedSf = sequencingObjectService
.getSequencingObjectsOfTypeForAnalysisSubmission(analysisSubmission, SingleEndSequenceFile.class);
Set<SequenceFilePair> pairedFiles = sequencingObjectService
.getSequencingObjectsOfTypeForAnalysisSubmission(analysisSubmission, SequenceFilePair.class);
assertEquals("the created submission should have no paired input files", 0, pairedFiles.size());
assertEquals("the created submission should have 1 single input file", 1, submittedSf.size());
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
analysisSubmission.setAnalysisState(AnalysisState.COMPLETING);
analysisSubmissionRepository.save(analysisSubmission);
Analysis analysis = analysisWorkspaceService.getAnalysisResults(analysisSubmission);
assertNotNull("the analysis results were not properly created", analysis);
assertEquals("the Analysis results class is invalid", Analysis.class, analysis.getClass());
assertEquals("the analysis results has an invalid number of output files", 2, analysis.getAnalysisOutputFiles()
.size());
assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT1_NAME), analysis
.getAnalysisOutputFile(OUTPUT1_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", SAMPLE1_NAME + '-' + OUTPUT1_NAME, analysis
.getAnalysisOutputFile(OUTPUT1_KEY).getLabel());
assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT2_NAME), analysis
.getAnalysisOutputFile(OUTPUT2_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", SAMPLE1_NAME + '-' + OUTPUT2_NAME, analysis
.getAnalysisOutputFile(OUTPUT2_KEY).getLabel());
}
/**
* Tests out successfully getting results for an analysis (TestAnalysis)
* consisting only of paired sequence reads.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IridaWorkflowNotFoundException
* @throws IOException
* @throws IridaWorkflowAnalysisTypeException
* @throws TimeoutException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testGetAnalysisResultsTestAnalysisPairedSuccess() throws InterruptedException,
ExecutionManagerException, IridaWorkflowNotFoundException, IOException, IridaWorkflowAnalysisTypeException,
TimeoutException {
History history = new History();
history.setName("testGetAnalysisResultsTestAnalysisPairedSuccess");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
ToolsClient toolsClient = localGalaxy.getGalaxyInstanceAdmin().getToolsClient();
History createdHistory = historiesClient.create(history);
// upload test outputs
uploadFileToHistory(sequenceFilePathA, OUTPUT1_NAME, createdHistory.getId(), toolsClient);
uploadFileToHistory(sequenceFilePathA, OUTPUT2_NAME, createdHistory.getId(), toolsClient);
// wait for history
Util.waitUntilHistoryComplete(createdHistory.getId(), galaxyHistoriesService, 60);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdPaired);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
List<Path> paths1 = new ArrayList<>();
paths1.add(sequenceFilePathA);
List<Path> paths2 = new ArrayList<>();
paths2.add(sequenceFilePath2A);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L,
paths1, paths2, referenceFilePath, validWorkflowIdPaired);
Set<SingleEndSequenceFile> submittedSingleFiles = sequencingObjectService
.getSequencingObjectsOfTypeForAnalysisSubmission(analysisSubmission, SingleEndSequenceFile.class);
Set<SequenceFilePair> pairedFiles = sequencingObjectService
.getSequencingObjectsOfTypeForAnalysisSubmission(analysisSubmission, SequenceFilePair.class);
assertEquals("the created submission should have no single input files", 0, submittedSingleFiles.size());
assertEquals("the created submission has an invalid number of paired input files", 1, pairedFiles.size());
SequenceFilePair submittedSp = pairedFiles.iterator().next();
Set<SequenceFile> submittedSf = submittedSp.getFiles();
assertEquals("the paired input should have 2 files", 2, submittedSf.size());
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
analysisSubmission.setAnalysisState(AnalysisState.COMPLETING);
analysisSubmissionRepository.save(analysisSubmission);
Analysis analysis = analysisWorkspaceService.getAnalysisResults(analysisSubmission);
assertNotNull("the analysis results were not properly created", analysis);
assertEquals("the Analysis results class is invalid", Analysis.class, analysis.getClass());
assertEquals("the analysis results has an invalid number of output files", 2, analysis.getAnalysisOutputFiles()
.size());
assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT1_NAME), analysis
.getAnalysisOutputFile(OUTPUT1_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", OUTPUT1_NAME, analysis
.getAnalysisOutputFile(OUTPUT1_KEY).getLabel());
assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT2_NAME), analysis
.getAnalysisOutputFile(OUTPUT2_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", OUTPUT2_NAME, analysis
.getAnalysisOutputFile(OUTPUT2_KEY).getLabel());
}
/**
* Tests out successfully getting results for an analysis (TestAnalysis)
 * consisting only of paired sequence reads (for workflow accepting single sample).
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IridaWorkflowNotFoundException
* @throws IOException
* @throws IridaWorkflowAnalysisTypeException
* @throws TimeoutException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testGetAnalysisResultsTestAnalysisPairedSingleSampleSuccess() throws InterruptedException,
ExecutionManagerException, IridaWorkflowNotFoundException, IOException, IridaWorkflowAnalysisTypeException,
TimeoutException {
History history = new History();
history.setName("testGetAnalysisResultsTestAnalysisPairedSingleSampleSuccess");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
ToolsClient toolsClient = localGalaxy.getGalaxyInstanceAdmin().getToolsClient();
History createdHistory = historiesClient.create(history);
// upload test outputs
uploadFileToHistory(sequenceFilePathA, OUTPUT1_NAME, createdHistory.getId(), toolsClient);
uploadFileToHistory(sequenceFilePathA, OUTPUT2_NAME, createdHistory.getId(), toolsClient);
// wait for history
Util.waitUntilHistoryComplete(createdHistory.getId(), galaxyHistoriesService, 60);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdPairedSingleSample);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
List<Path> paths1 = new ArrayList<>();
paths1.add(sequenceFilePathA);
List<Path> paths2 = new ArrayList<>();
paths2.add(sequenceFilePath2A);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L,
paths1, paths2, referenceFilePath, validWorkflowIdPairedSingleSample);
Set<SingleEndSequenceFile> submittedSingleFiles = sequencingObjectService
.getSequencingObjectsOfTypeForAnalysisSubmission(analysisSubmission, SingleEndSequenceFile.class);
Set<SequenceFilePair> pairedFiles = sequencingObjectService
.getSequencingObjectsOfTypeForAnalysisSubmission(analysisSubmission, SequenceFilePair.class);
assertEquals("the created submission should have no single input files", 0, submittedSingleFiles.size());
assertEquals("the created submission has an invalid number of paired input files", 1, pairedFiles.size());
SequenceFilePair submittedSp = pairedFiles.iterator().next();
Set<SequenceFile> submittedSf = submittedSp.getFiles();
assertEquals("the paired input should have 2 files", 2, submittedSf.size());
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
analysisSubmission.setAnalysisState(AnalysisState.COMPLETING);
analysisSubmissionRepository.save(analysisSubmission);
Analysis analysis = analysisWorkspaceService.getAnalysisResults(analysisSubmission);
assertNotNull("the analysis results were not properly created", analysis);
assertEquals("the Analysis results class is invalid", Analysis.class, analysis.getClass());
assertEquals("the analysis results has an invalid number of output files", 2, analysis.getAnalysisOutputFiles()
.size());
assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT1_NAME), analysis
.getAnalysisOutputFile(OUTPUT1_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", SAMPLE1_NAME + "-" + OUTPUT1_NAME, analysis
.getAnalysisOutputFile(OUTPUT1_KEY).getLabel());
assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT2_NAME), analysis
.getAnalysisOutputFile(OUTPUT2_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", SAMPLE1_NAME + "-" + OUTPUT2_NAME, analysis
.getAnalysisOutputFile(OUTPUT2_KEY).getLabel());
}
/**
* Tests out successfully getting results for an analysis (TestAnalysis)
 * when sequencing objects are present but the sample was deleted while the pipeline was running.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IridaWorkflowNotFoundException
* @throws IOException
* @throws IridaWorkflowAnalysisTypeException
* @throws TimeoutException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testGetAnalysisResultsTestAnalysisDeleteSampleRunningSuccess() throws InterruptedException,
ExecutionManagerException, IridaWorkflowNotFoundException, IOException, IridaWorkflowAnalysisTypeException,
TimeoutException {
History history = new History();
history.setName("testGetAnalysisResultsTestAnalysisDeleteSampleRunningSuccess");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
ToolsClient toolsClient = localGalaxy.getGalaxyInstanceAdmin().getToolsClient();
History createdHistory = historiesClient.create(history);
// upload test outputs
uploadFileToHistory(sequenceFilePathA, OUTPUT1_NAME, createdHistory.getId(), toolsClient);
uploadFileToHistory(sequenceFilePathA, OUTPUT2_NAME, createdHistory.getId(), toolsClient);
// wait for history
Util.waitUntilHistoryComplete(createdHistory.getId(), galaxyHistoriesService, 60);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdPairedSingleSample);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
List<Path> paths1 = new ArrayList<>();
paths1.add(sequenceFilePathA);
List<Path> paths2 = new ArrayList<>();
paths2.add(sequenceFilePath2A);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L,
paths1, paths2, referenceFilePath, validWorkflowIdPairedSingleSample);
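		// delete the sample after submission setup to simulate a sample being
		// removed while the pipeline is still running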
sampleRepository.delete(1L);
		assertFalse("sample 1L should have been deleted", sampleService.exists(1L));
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
analysisSubmission.setAnalysisState(AnalysisState.COMPLETING);
analysisSubmissionRepository.save(analysisSubmission);
Analysis analysis = analysisWorkspaceService.getAnalysisResults(analysisSubmission);
assertNotNull("the analysis results were not properly created", analysis);
assertEquals("the Analysis results class is invalid", Analysis.class, analysis.getClass());
assertEquals("the analysis results has an invalid number of output files", 2, analysis.getAnalysisOutputFiles()
.size());
assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT1_NAME), analysis
.getAnalysisOutputFile(OUTPUT1_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", OUTPUT1_NAME, analysis
.getAnalysisOutputFile(OUTPUT1_KEY).getLabel());
assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT2_NAME), analysis
.getAnalysisOutputFile(OUTPUT2_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", OUTPUT2_NAME, analysis
.getAnalysisOutputFile(OUTPUT2_KEY).getLabel());
}
/**
* Tests out successfully getting results for an analysis (TestAnalysis)
* consisting of both single and paired sequence reads.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IridaWorkflowNotFoundException
* @throws IOException
* @throws IridaWorkflowAnalysisTypeException
* @throws TimeoutException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testGetAnalysisResultsTestAnalysisSinglePairedSuccess() throws InterruptedException,
ExecutionManagerException, IridaWorkflowNotFoundException, IOException, IridaWorkflowAnalysisTypeException,
TimeoutException {
History history = new History();
history.setName("testGetAnalysisResultsTestAnalysisSinglePairedSuccess");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
ToolsClient toolsClient = localGalaxy.getGalaxyInstanceAdmin().getToolsClient();
History createdHistory = historiesClient.create(history);
// upload test outputs
uploadFileToHistory(sequenceFilePathA, OUTPUT1_NAME, createdHistory.getId(), toolsClient);
uploadFileToHistory(sequenceFilePathA, OUTPUT2_NAME, createdHistory.getId(), toolsClient);
// wait for history
Util.waitUntilHistoryComplete(createdHistory.getId(), galaxyHistoriesService, 60);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdSinglePaired);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
List<Path> paths1 = new ArrayList<>();
paths1.add(sequenceFilePathA);
List<Path> paths2 = new ArrayList<>();
paths2.add(sequenceFilePath2A);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService
.setupSinglePairSubmissionInDatabaseSameSample(1L, paths1, paths2, sequenceFilePath3,
referenceFilePath, validWorkflowIdSinglePaired);
Set<SingleEndSequenceFile> singleFiles = sequencingObjectService
.getSequencingObjectsOfTypeForAnalysisSubmission(analysisSubmission, SingleEndSequenceFile.class);
Set<SequenceFilePair> pairedFiles = sequencingObjectService
.getSequencingObjectsOfTypeForAnalysisSubmission(analysisSubmission, SequenceFilePair.class);
assertEquals("invalid number of single end input files", 1, singleFiles.size());
assertEquals("invalid number of paired end inputs", 1, pairedFiles.size());
SequenceFilePair submittedSp = pairedFiles.iterator().next();
Set<SequenceFile> submittedSf = submittedSp.getFiles();
assertEquals("invalid number of files for paired input", 2, submittedSf.size());
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
analysisSubmission.setAnalysisState(AnalysisState.COMPLETING);
analysisSubmissionRepository.save(analysisSubmission);
Analysis analysis = analysisWorkspaceService.getAnalysisResults(analysisSubmission);
assertNotNull("the analysis results were not properly created", analysis);
assertEquals("the Analysis results class is invalid", Analysis.class, analysis.getClass());
assertEquals("the analysis results has an invalid number of output files", 2, analysis.getAnalysisOutputFiles()
.size());
assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT1_NAME), analysis
.getAnalysisOutputFile(OUTPUT1_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", OUTPUT1_NAME, analysis
.getAnalysisOutputFile(OUTPUT1_KEY).getLabel());
assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT2_NAME), analysis
.getAnalysisOutputFile(OUTPUT2_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", OUTPUT2_NAME, analysis
.getAnalysisOutputFile(OUTPUT2_KEY).getLabel());
}
/**
* Tests out successfully getting results for an analysis (phylogenomics).
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IridaWorkflowNotFoundException
* @throws IOException
* @throws IridaWorkflowAnalysisTypeException
* @throws TimeoutException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testGetAnalysisResultsPhylogenomicsSuccess() throws InterruptedException, ExecutionManagerException,
IridaWorkflowNotFoundException, IOException, IridaWorkflowAnalysisTypeException, TimeoutException {
History history = new History();
history.setName("testGetAnalysisResultsPhylogenomicsSuccess");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
ToolsClient toolsClient = localGalaxy.getGalaxyInstanceAdmin().getToolsClient();
History createdHistory = historiesClient.create(history);
// upload test outputs
uploadFileToHistory(sequenceFilePathA, TABLE_NAME, createdHistory.getId(), toolsClient);
uploadFileToHistory(sequenceFilePathA, MATRIX_NAME, createdHistory.getId(), toolsClient);
uploadFileToHistory(sequenceFilePathA, TREE_NAME, createdHistory.getId(), toolsClient);
// wait for history
Util.waitUntilHistoryComplete(createdHistory.getId(), galaxyHistoriesService, 60);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(phylogenomicsWorkflowId);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupSubmissionInDatabase(1L,
sequenceFilePathA, referenceFilePath, phylogenomicsWorkflowId);
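		// link the submission to the history containing the uploaded table, matrix, and tree outputs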
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
analysisSubmission.setAnalysisState(AnalysisState.COMPLETING);
analysisSubmissionRepository.save(analysisSubmission);
Analysis analysis = analysisWorkspaceService.getAnalysisResults(analysisSubmission);
assertNotNull("the analysis results were not properly created", analysis);
assertEquals("the Analysis results class is invalid", AnalysisPhylogenomicsPipeline.class, analysis.getClass());
assertEquals("the analysis results has an invalid number of output files", 3, analysis.getAnalysisOutputFiles()
.size());
assertEquals("the analysis results output file has an invalid name", Paths.get(TABLE_NAME), analysis
.getAnalysisOutputFile(TABLE_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", TABLE_NAME, analysis
.getAnalysisOutputFile(TABLE_KEY).getLabel());
assertEquals("the analysis results output file has an invalid name", Paths.get(MATRIX_NAME), analysis
.getAnalysisOutputFile(MATRIX_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", MATRIX_NAME, analysis
.getAnalysisOutputFile(MATRIX_KEY).getLabel());
assertEquals("the analysis results output file has an invalid name", Paths.get(TREE_NAME), analysis
.getAnalysisOutputFile(TREE_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", TREE_NAME, analysis
.getAnalysisOutputFile(TREE_KEY).getLabel());
}
/**
* Tests out failing to get results for an analysis (missing output file).
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IridaWorkflowNotFoundException
* @throws IOException
* @throws IridaWorkflowAnalysisTypeException
* @throws TimeoutException
*/
@Test(expected = GalaxyDatasetNotFoundException.class)
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testGetAnalysisResultsTestAnalysisFail() throws InterruptedException, ExecutionManagerException,
IridaWorkflowNotFoundException, IOException, IridaWorkflowAnalysisTypeException, TimeoutException {
History history = new History();
history.setName("testGetAnalysisResultsTestAnalysisFail");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
ToolsClient toolsClient = localGalaxy.getGalaxyInstanceAdmin().getToolsClient();
History createdHistory = historiesClient.create(history);
// upload test outputs
uploadFileToHistory(sequenceFilePathA, OUTPUT1_NAME, createdHistory.getId(), toolsClient);
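		// OUTPUT2_NAME is deliberately not uploaded, so the second output dataset will be missing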
// wait for history
Util.waitUntilHistoryComplete(createdHistory.getId(), galaxyHistoriesService, 60);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdSingle);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupSubmissionInDatabase(1L,
sequenceFilePathA, referenceFilePath, validWorkflowIdSingle);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
analysisSubmission.setAnalysisState(AnalysisState.COMPLETING);
analysisSubmissionRepository.save(analysisSubmission);
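		// expected to throw GalaxyDatasetNotFoundException since the second output dataset was never uploaded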
analysisWorkspaceService.getAnalysisResults(analysisSubmission);
}
}
| src/test/java/ca/corefacility/bioinformatics/irida/service/analysis/workspace/galaxy/impl/integration/AnalysisWorkspaceServiceGalaxyIT.java | package ca.corefacility.bioinformatics.irida.service.analysis.workspace.galaxy.impl.integration;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.security.test.context.support.WithMockUser;
import org.springframework.security.test.context.support.WithSecurityContextTestExcecutionListener;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.TestExecutionListeners;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.support.AnnotationConfigContextLoader;
import org.springframework.test.context.support.DependencyInjectionTestExecutionListener;
import com.github.jmchilton.blend4j.galaxy.GalaxyInstance;
import com.github.jmchilton.blend4j.galaxy.HistoriesClient;
import com.github.jmchilton.blend4j.galaxy.LibrariesClient;
import com.github.jmchilton.blend4j.galaxy.ToolsClient;
import com.github.jmchilton.blend4j.galaxy.WorkflowsClient;
import com.github.jmchilton.blend4j.galaxy.beans.History;
import com.github.jmchilton.blend4j.galaxy.beans.HistoryContents;
import com.github.jmchilton.blend4j.galaxy.beans.LibraryContent;
import com.github.jmchilton.blend4j.galaxy.beans.Workflow;
import com.github.jmchilton.blend4j.galaxy.beans.WorkflowInputs;
import com.github.jmchilton.blend4j.galaxy.beans.WorkflowInputs.WorkflowInput;
import com.github.springtestdbunit.DbUnitTestExecutionListener;
import com.github.springtestdbunit.annotation.DatabaseSetup;
import com.github.springtestdbunit.annotation.DatabaseTearDown;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import ca.corefacility.bioinformatics.irida.config.IridaApiGalaxyTestConfig;
import ca.corefacility.bioinformatics.irida.config.conditions.WindowsPlatformCondition;
import ca.corefacility.bioinformatics.irida.exceptions.DuplicateSampleException;
import ca.corefacility.bioinformatics.irida.exceptions.ExecutionManagerException;
import ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowAnalysisTypeException;
import ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowException;
import ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowLoadException;
import ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowNotFoundException;
import ca.corefacility.bioinformatics.irida.exceptions.IridaWorkflowParameterException;
import ca.corefacility.bioinformatics.irida.exceptions.SampleAnalysisDuplicateException;
import ca.corefacility.bioinformatics.irida.exceptions.WorkflowException;
import ca.corefacility.bioinformatics.irida.exceptions.galaxy.GalaxyDatasetNotFoundException;
import ca.corefacility.bioinformatics.irida.model.enums.AnalysisState;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFile;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequenceFilePair;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SequencingObject;
import ca.corefacility.bioinformatics.irida.model.sequenceFile.SingleEndSequenceFile;
import ca.corefacility.bioinformatics.irida.model.workflow.IridaWorkflow;
import ca.corefacility.bioinformatics.irida.model.workflow.analysis.Analysis;
import ca.corefacility.bioinformatics.irida.model.workflow.analysis.AnalysisPhylogenomicsPipeline;
import ca.corefacility.bioinformatics.irida.model.workflow.description.IridaWorkflowParameter;
import ca.corefacility.bioinformatics.irida.model.workflow.execution.galaxy.PreparedWorkflowGalaxy;
import ca.corefacility.bioinformatics.irida.model.workflow.execution.galaxy.WorkflowInputsGalaxy;
import ca.corefacility.bioinformatics.irida.model.workflow.submission.AnalysisSubmission;
import ca.corefacility.bioinformatics.irida.pipeline.upload.galaxy.GalaxyHistoriesService;
import ca.corefacility.bioinformatics.irida.pipeline.upload.galaxy.GalaxyLibrariesService;
import ca.corefacility.bioinformatics.irida.pipeline.upload.galaxy.integration.LocalGalaxy;
import ca.corefacility.bioinformatics.irida.pipeline.upload.galaxy.integration.Util;
import ca.corefacility.bioinformatics.irida.repositories.analysis.submission.AnalysisSubmissionRepository;
import ca.corefacility.bioinformatics.irida.repositories.sample.SampleRepository;
import ca.corefacility.bioinformatics.irida.service.DatabaseSetupGalaxyITService;
import ca.corefacility.bioinformatics.irida.service.analysis.workspace.galaxy.AnalysisWorkspaceServiceGalaxy;
import ca.corefacility.bioinformatics.irida.service.sample.SampleService;
import ca.corefacility.bioinformatics.irida.service.workflow.IridaWorkflowsService;
/**
* Tests out preparing a workspace for execution of workflows in Galaxy.
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(loader = AnnotationConfigContextLoader.class, classes = { IridaApiGalaxyTestConfig.class })
@ActiveProfiles("test")
@TestExecutionListeners({ DependencyInjectionTestExecutionListener.class, DbUnitTestExecutionListener.class,
WithSecurityContextTestExcecutionListener.class })
@DatabaseSetup("/ca/corefacility/bioinformatics/irida/repositories/analysis/AnalysisRepositoryIT.xml")
@DatabaseTearDown("/ca/corefacility/bioinformatics/irida/test/integration/TableReset.xml")
public class AnalysisWorkspaceServiceGalaxyIT {
@Autowired
private DatabaseSetupGalaxyITService analysisExecutionGalaxyITService;
@Autowired
private LocalGalaxy localGalaxy;
@Autowired
private AnalysisWorkspaceServiceGalaxy analysisWorkspaceService;
@Autowired
private IridaWorkflowsService iridaWorkflowsService;
@Autowired
private AnalysisSubmissionRepository analysisSubmissionRepository;
@Autowired
private SampleService sampleService;
@Autowired
private SampleRepository sampleRepository;
@Autowired
@Qualifier("rootTempDirectory")
private Path rootTempDirectory;
private GalaxyHistoriesService galaxyHistoriesService;
/**
* Timeout in seconds to stop polling a Galaxy library.
*/
private static final int LIBRARY_TIMEOUT = 5 * 60;
/**
	 * Polling interval in seconds used to check whether datasets have been
	 * properly uploaded to a Galaxy library.
*/
private static final int LIBRARY_POLLING_TIME = 5;
private Path sequenceFilePathA;
private Path sequenceFilePath2A;
private Path sequenceFilePathB;
private Path sequenceFilePath2B;
private Path sequenceFilePath3;
private Path referenceFilePath;
private List<Path> pairSequenceFiles1A;
private List<Path> pairSequenceFiles2A;
private List<Path> pairSequenceFiles1AB;
private List<Path> pairSequenceFiles2AB;
private Set<SequencingObject> singleFileSet;
private static final UUID validWorkflowIdSingle = UUID.fromString("739f29ea-ae82-48b9-8914-3d2931405db6");
private static final UUID validWorkflowIdSingleSingleSample = UUID.fromString("a9692a52-5bc6-4da2-a89d-d880bb35bfe4");
private static final UUID validWorkflowIdPaired = UUID.fromString("ec93b50d-c9dd-4000-98fc-4a70d46ddd36");
private static final UUID validWorkflowIdPairedSingleSample = UUID.fromString("fc93b50d-c9dd-4000-98fc-4a70d46ddd36");
private static final UUID validWorkflowIdPairedWithParameters = UUID.fromString("23434bf8-e551-4efd-9957-e61c6f649f8b");
private static final UUID validWorkflowIdSinglePaired = UUID.fromString("d92e9918-1e3d-4dea-b2b9-089f1256ac1b");
private static final UUID phylogenomicsWorkflowId = UUID.fromString("1f9ea289-5053-4e4a-bc76-1f0c60b179f8");
private static final String OUTPUT1_KEY = "output1";
private static final String OUTPUT2_KEY = "output2";
private static final String OUTPUT1_NAME = "output1.txt";
private static final String OUTPUT2_NAME = "output2.txt";
private static final String MATRIX_NAME = "snpMatrix.tsv";
private static final String MATRIX_KEY = "matrix";
private static final String TREE_NAME = "phylogeneticTree.txt";
private static final String TREE_KEY = "tree";
private static final String TABLE_NAME = "snpTable.tsv";
private static final String TABLE_KEY = "table";
private static final String INPUTS_SINGLE_NAME = "irida_sequence_files_single";
private static final String INPUTS_PAIRED_NAME = "irida_sequence_files_paired";
private static final String SAMPLE1_NAME = "sample1";
/**
* Sets up variables for testing.
*
* @throws URISyntaxException
* @throws IOException
* @throws IridaWorkflowLoadException
*/
@Before
public void setup() throws URISyntaxException, IOException, IridaWorkflowLoadException {
Assume.assumeFalse(WindowsPlatformCondition.isWindows());
Path sequenceFilePathReal = Paths
.get(DatabaseSetupGalaxyITService.class.getResource("testData1.fastq").toURI());
Path referenceFilePathReal = Paths.get(DatabaseSetupGalaxyITService.class.getResource("testReference.fasta")
.toURI());
Path tempDir = Files.createTempDirectory(rootTempDirectory, "workspaceServiceGalaxyTest");
sequenceFilePathA = tempDir.resolve("testDataA_R1_001.fastq");
Files.copy(sequenceFilePathReal, sequenceFilePathA, StandardCopyOption.REPLACE_EXISTING);
sequenceFilePath2A = tempDir.resolve("testDataA_R2_001.fastq");
Files.copy(sequenceFilePathReal, sequenceFilePath2A, StandardCopyOption.REPLACE_EXISTING);
sequenceFilePathB = tempDir.resolve("testDataB_R1_001.fastq");
Files.copy(sequenceFilePathReal, sequenceFilePathB, StandardCopyOption.REPLACE_EXISTING);
sequenceFilePath2B = tempDir.resolve("testDataB_R2_001.fastq");
Files.copy(sequenceFilePathReal, sequenceFilePath2B, StandardCopyOption.REPLACE_EXISTING);
sequenceFilePath3 = tempDir.resolve("testData3_R1_001.fastq");
Files.copy(sequenceFilePathReal, sequenceFilePath3, StandardCopyOption.REPLACE_EXISTING);
referenceFilePath = Files.createTempFile("testReference", ".fasta");
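		// delete the empty placeholder so the copy below can create the file fresh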
Files.delete(referenceFilePath);
Files.copy(referenceFilePathReal, referenceFilePath);
singleFileSet = Sets.newHashSet(new SingleEndSequenceFile(new SequenceFile(sequenceFilePathA)));
GalaxyInstance galaxyInstanceAdmin = localGalaxy.getGalaxyInstanceAdmin();
HistoriesClient historiesClient = galaxyInstanceAdmin.getHistoriesClient();
ToolsClient toolsClient = galaxyInstanceAdmin.getToolsClient();
LibrariesClient librariesClient = galaxyInstanceAdmin.getLibrariesClient();
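		// poll the library every LIBRARY_POLLING_TIME seconds, up to LIBRARY_TIMEOUT seconds, while datasets upload; the final constructor argument (1) is assumed to cap concurrent uploads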
GalaxyLibrariesService galaxyLibrariesService = new GalaxyLibrariesService(librariesClient, LIBRARY_POLLING_TIME, LIBRARY_TIMEOUT, 1);
galaxyHistoriesService = new GalaxyHistoriesService(historiesClient, toolsClient, galaxyLibrariesService);
pairSequenceFiles1A = new ArrayList<>();
pairSequenceFiles1A.add(sequenceFilePathA);
pairSequenceFiles2A = new ArrayList<>();
pairSequenceFiles2A.add(sequenceFilePath2A);
pairSequenceFiles1AB = new ArrayList<>();
pairSequenceFiles1AB.add(sequenceFilePathA);
pairSequenceFiles1AB.add(sequenceFilePathB);
pairSequenceFiles2AB = new ArrayList<>();
pairSequenceFiles2AB.add(sequenceFilePath2A);
pairSequenceFiles2AB.add(sequenceFilePath2B);
}
/**
* Tests successfully preparing a workspace for analysis.
*
* @throws IridaWorkflowNotFoundException
* @throws ExecutionManagerException
*/
@Test
public void testPrepareAnalysisWorkspaceSuccess() throws IridaWorkflowNotFoundException, ExecutionManagerException {
AnalysisSubmission submission = AnalysisSubmission.builder(validWorkflowIdSingle)
.name("Name")
.inputFiles(singleFileSet)
.build();
assertNotNull("preparing an analysis workspace should not return null",
analysisWorkspaceService.prepareAnalysisWorkspace(submission));
}
/**
* Tests failure to prepare a workspace for analysis.
*
* @throws IridaWorkflowNotFoundException
* @throws ExecutionManagerException
*/
@Test(expected = IllegalArgumentException.class)
public void testPrepareAnalysisWorkspaceFail() throws IridaWorkflowNotFoundException, ExecutionManagerException {
AnalysisSubmission submission = AnalysisSubmission.builder(validWorkflowIdSingle)
.name("Name")
.inputFiles(singleFileSet)
.build();
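		// a submission that already has a remote analysis id cannot be prepared again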
submission.setRemoteAnalysisId("1");
analysisWorkspaceService.prepareAnalysisWorkspace(submission);
}
/**
* Tests out successfully preparing single workflow input files for
* execution.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesSingleSuccess() throws InterruptedException, ExecutionManagerException,
IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesSingleSuccess");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
LibrariesClient librariesClient = localGalaxy.getGalaxyInstanceAdmin().getLibrariesClient();
History createdHistory = historiesClient.create(history);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdSingle);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupSubmissionInDatabase(1L,
sequenceFilePathA, referenceFilePath, validWorkflowIdSingle);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
PreparedWorkflowGalaxy preparedWorkflow = analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
assertEquals("the response history id should match the input history id", createdHistory.getId(),
preparedWorkflow.getRemoteAnalysisId());
assertNotNull("the returned workflow inputs should not be null", preparedWorkflow.getWorkflowInputs());
assertNotNull("the returned library id should not be null", preparedWorkflow.getRemoteDataId());
// verify correct library is created
List<LibraryContent> libraryContents = librariesClient.getLibraryContents(preparedWorkflow.getRemoteDataId());
Map<String, List<LibraryContent>> libraryContentsMap = libraryContents.stream().collect(Collectors.groupingBy(LibraryContent::getName));
assertFalse("the returned library should exist in Galaxy", libraryContentsMap.isEmpty());
String sequenceFileALibraryName = "/" + sequenceFilePathA.getFileName().toString();
assertEquals("the returned library does not contain the correct number of elements", 2,
libraryContentsMap.size());
assertTrue("the returned library does not contain a root folder", libraryContentsMap.containsKey("/"));
assertTrue("the returned library does not contain the correct sequence file",
libraryContentsMap.containsKey(sequenceFileALibraryName));
assertEquals("the returned library does not contain the correct sequence file", 1,
libraryContentsMap.get(sequenceFileALibraryName).size());
// verify correct files have been uploaded
List<HistoryContents> historyContents = historiesClient.showHistoryContents(createdHistory.getId());
assertEquals("the created history should contain 3 entries", 3, historyContents.size());
Map<String, HistoryContents> contentsMap = historyContentsAsMap(historyContents);
assertTrue("the created history should contain the file " + sequenceFilePathA.toFile().getName(),
contentsMap.containsKey(sequenceFilePathA.toFile().getName()));
assertTrue("the created history should contain the file " + referenceFilePath.toFile().getName(),
contentsMap.containsKey(referenceFilePath.toFile().getName()));
assertTrue("the created history should contain the collection with name " + INPUTS_SINGLE_NAME,
contentsMap.containsKey(INPUTS_SINGLE_NAME));
// make sure workflow inputs contains correct information
Map<String, WorkflowInput> workflowInputsMap = preparedWorkflow.getWorkflowInputs().getInputsObject()
.getInputs();
assertEquals("the created workflow inputs has an invalid number of elements", 2, workflowInputsMap.size());
}
private Map<String, HistoryContents> historyContentsAsMap(List<HistoryContents> historyContents) {
		return Maps.uniqueIndex(historyContents, HistoryContents::getName);
}
/**
* Tests out failing to prepare single workflow input files for execution
* (duplicate samples).
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test(expected = DuplicateSampleException.class)
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesSingleFail() throws InterruptedException, ExecutionManagerException,
IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesSingleFail");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
History createdHistory = historiesClient.create(history);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdSingle);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
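		// two single-end files attached to the same sample (1L) should trigger the DuplicateSampleException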
List<SingleEndSequenceFile> sequenceFiles = analysisExecutionGalaxyITService.setupSequencingObjectInDatabase(
1L, sequenceFilePathA, sequenceFilePath2A);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupSubmissionInDatabase(1L,
Sets.newHashSet(sequenceFiles), referenceFilePath, validWorkflowIdSingle);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
}
/**
* Tests out successfully preparing paired workflow input files for
* execution.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesPairSuccess() throws InterruptedException, ExecutionManagerException,
IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesPairSuccess");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
LibrariesClient librariesClient = localGalaxy.getGalaxyInstanceAdmin().getLibrariesClient();
History createdHistory = historiesClient.create(history);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdPaired);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L,
pairSequenceFiles1A, pairSequenceFiles2A, referenceFilePath, validWorkflowIdPaired);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
PreparedWorkflowGalaxy preparedWorkflow = analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
assertEquals("the response history id should match the input history id", createdHistory.getId(),
preparedWorkflow.getRemoteAnalysisId());
WorkflowInputsGalaxy workflowInputsGalaxy = preparedWorkflow.getWorkflowInputs();
assertNotNull("the returned workflow inputs should not be null", workflowInputsGalaxy);
assertNotNull("the returned library id should not be null", preparedWorkflow.getRemoteDataId());
// verify correct library is created
List<LibraryContent> libraryContents = librariesClient.getLibraryContents(preparedWorkflow.getRemoteDataId());
Map<String, List<LibraryContent>> libraryContentsMap = libraryContents.stream().collect(Collectors.groupingBy(LibraryContent::getName));
assertFalse("the returned library should exist in Galaxy", libraryContentsMap.isEmpty());
String sequenceFile1ALibraryName = "/" + sequenceFilePathA.getFileName().toString();
String sequenceFile2ALibraryName = "/" + sequenceFilePath2A.getFileName().toString();
assertEquals("the returned library does not contain the correct number of elements", 3,
libraryContentsMap.size());
assertTrue("the returned library does not contain a root folder", libraryContentsMap.containsKey("/"));
assertTrue("the returned library does not contain the correct sequence file",
libraryContentsMap.containsKey(sequenceFile1ALibraryName));
assertEquals("the returned library does not contain the correct sequence file", 1,
libraryContentsMap.get(sequenceFile1ALibraryName).size());
assertTrue("the returned library does not contain the correct sequence file",
libraryContentsMap.containsKey(sequenceFile2ALibraryName));
assertEquals("the returned library does not contain the correct sequence file", 1,
libraryContentsMap.get(sequenceFile2ALibraryName).size());
// verify correct files have been uploaded
List<HistoryContents> historyContents = historiesClient.showHistoryContents(createdHistory.getId());
assertEquals("the created history has an invalid number of elements", 4, historyContents.size());
Map<String, HistoryContents> contentsMap = historyContentsAsMap(historyContents);
assertTrue("the created history should contain the file " + sequenceFilePathA.toFile().getName(),
contentsMap.containsKey(sequenceFilePathA.toFile().getName()));
assertTrue("the created history should contain the file " + sequenceFilePath2A.toFile().getName(),
contentsMap.containsKey(sequenceFilePath2A.toFile().getName()));
assertTrue("the created history should contain the file " + referenceFilePath.toFile().getName(),
contentsMap.containsKey(referenceFilePath.toFile().getName()));
assertTrue("the created history should contain the collection with name " + INPUTS_PAIRED_NAME,
contentsMap.containsKey(INPUTS_PAIRED_NAME));
// make sure workflow inputs contains correct information
Map<String, WorkflowInput> workflowInputsMap = preparedWorkflow.getWorkflowInputs().getInputsObject()
.getInputs();
assertEquals("the created workflow inputs has an invalid number of elements", 2, workflowInputsMap.size());
}
/**
* Tests out successfully preparing paired workflow input files for
* execution with parameters.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesParametersSuccess() throws InterruptedException, ExecutionManagerException,
IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesParametersSuccess");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
History createdHistory = historiesClient.create(history);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdPairedWithParameters);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
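		// override the "coverage" workflow parameter; it maps onto the tool's coverageMin/coverageMid/coverageMax values asserted below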
Map<String, String> parameters = ImmutableMap.of("coverage", "20");
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L,
pairSequenceFiles1A, pairSequenceFiles2A, referenceFilePath, parameters,
validWorkflowIdPairedWithParameters);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
PreparedWorkflowGalaxy preparedWorkflow = analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
assertEquals("the response history id should match the input history id", createdHistory.getId(),
preparedWorkflow.getRemoteAnalysisId());
WorkflowInputsGalaxy workflowInputsGalaxy = preparedWorkflow.getWorkflowInputs();
assertNotNull("the returned workflow inputs should not be null", workflowInputsGalaxy);
assertNotNull("the returned library id should not be null", preparedWorkflow.getRemoteDataId());
// verify correct files have been uploaded
List<HistoryContents> historyContents = historiesClient.showHistoryContents(createdHistory.getId());
assertEquals("the created history has an invalid number of elements", 4, historyContents.size());
WorkflowInputs workflowInputs = preparedWorkflow.getWorkflowInputs().getInputsObject();
assertNotNull("created workflowInputs is null", workflowInputs);
Map<String, Object> toolParameters = workflowInputs.getParameters().get(
"core_pipeline_outputs_paired_with_parameters");
assertNotNull("toolParameters is null", toolParameters);
String coverageMinValue = (String) toolParameters.get("coverageMin");
assertEquals("coverageMinValue should have been changed", "20", coverageMinValue);
assertEquals("coverageMidValue should have been changed",
ImmutableMap.of("coverageMid", "20"), toolParameters.get("conditional"));
		String coverageMaxValue = (String) toolParameters.get("coverageMax");
		assertEquals("coverageMaxValue should have been changed", "20", coverageMaxValue);
}
/**
* Tests out successfully preparing paired workflow input files for
* execution, no parameters set.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesParametersSuccessWithNoParameters() throws InterruptedException,
ExecutionManagerException, IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesParametersSuccessWithNoParameters");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
History createdHistory = historiesClient.create(history);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdPairedWithParameters);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L,
pairSequenceFiles1A, pairSequenceFiles2A, referenceFilePath, validWorkflowIdPairedWithParameters);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
PreparedWorkflowGalaxy preparedWorkflow = analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
assertEquals("the response history id should match the input history id", createdHistory.getId(),
preparedWorkflow.getRemoteAnalysisId());
WorkflowInputsGalaxy workflowInputsGalaxy = preparedWorkflow.getWorkflowInputs();
assertNotNull("the returned workflow inputs should not be null", workflowInputsGalaxy);
assertNotNull("the returned library id should not be null", preparedWorkflow.getRemoteDataId());
// verify correct files have been uploaded
List<HistoryContents> historyContents = historiesClient.showHistoryContents(createdHistory.getId());
assertEquals("the created history has an invalid number of elements", 4, historyContents.size());
WorkflowInputs workflowInputs = preparedWorkflow.getWorkflowInputs().getInputsObject();
assertNotNull("created workflowInputs is null", workflowInputs);
Map<String, Object> toolParameters = workflowInputs.getParameters().get(
"core_pipeline_outputs_paired_with_parameters");
assertNotNull("toolParameters is null", toolParameters);
String coverageMinValue = (String) toolParameters.get("coverageMin");
assertEquals("coverageMinValue should have been changed to default", "10", coverageMinValue);
assertEquals("coverageMidValue should have been changed to default",
ImmutableMap.of("coverageMid", "10"), toolParameters.get("conditional"));
		String coverageMaxValue = (String) toolParameters.get("coverageMax");
		assertEquals("coverageMaxValue should have been changed to default", "10", coverageMaxValue);
}
/**
* Tests out successfully preparing paired workflow input files for
* execution and ignoring default parameters.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesParametersSuccessIgnoreDefaultParameters() throws InterruptedException,
ExecutionManagerException, IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesParametersSuccessIgnoreDefaultParameters");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
History createdHistory = historiesClient.create(history);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdPairedWithParameters);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
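		// IGNORE_DEFAULT_VALUE tells the service to omit this parameter entirely rather than writing a default value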
Map<String, String> parameters = ImmutableMap.of("coverage", IridaWorkflowParameter.IGNORE_DEFAULT_VALUE);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L,
pairSequenceFiles1A, pairSequenceFiles2A, referenceFilePath, parameters, validWorkflowIdPairedWithParameters);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
PreparedWorkflowGalaxy preparedWorkflow = analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
assertEquals("the response history id should match the input history id", createdHistory.getId(),
preparedWorkflow.getRemoteAnalysisId());
WorkflowInputsGalaxy workflowInputsGalaxy = preparedWorkflow.getWorkflowInputs();
assertNotNull("the returned workflow inputs should not be null", workflowInputsGalaxy);
assertNotNull("the returned library id should not be null", preparedWorkflow.getRemoteDataId());
// verify correct files have been uploaded
List<HistoryContents> historyContents = historiesClient.showHistoryContents(createdHistory.getId());
assertEquals("the created history has an invalid number of elements", 4, historyContents.size());
WorkflowInputs workflowInputs = preparedWorkflow.getWorkflowInputs().getInputsObject();
assertNotNull("created workflowInputs is null", workflowInputs);
Map<String, Object> toolParameters = workflowInputs.getParameters().get(
"core_pipeline_outputs_paired_with_parameters");
assertNull("toolParameters is not null", toolParameters);
}
/**
* Tests out failing to prepare paired workflow input files for execution
* with parameters due to an invalid parameter passed.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test(expected = IridaWorkflowParameterException.class)
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesParametersFailInvalidParameter() throws InterruptedException,
ExecutionManagerException, IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesParametersFailInvalidParameter");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
History createdHistory = historiesClient.create(history);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdPairedWithParameters);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
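		// "invalid" is not a parameter defined by this workflow, so preparing the analysis files should fail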
Map<String, String> parameters = ImmutableMap.of("invalid", "20");
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L,
pairSequenceFiles1A, pairSequenceFiles2A, referenceFilePath, parameters,
validWorkflowIdPairedWithParameters);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
}
/**
* Tests out failing to prepare paired workflow input files for execution
* (duplicate sample).
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test(expected = DuplicateSampleException.class)
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesPairFail() throws InterruptedException, ExecutionManagerException,
IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesPairFail");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
History createdHistory = historiesClient.create(history);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdPaired);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
// construct two pairs of sequence files with same sample (1L)
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L,
pairSequenceFiles1AB, pairSequenceFiles2AB, referenceFilePath, validWorkflowIdPaired);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
}
/**
* Tests out successfully preparing paired and single workflow input files
* for execution.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesSinglePairSuccess() throws InterruptedException, ExecutionManagerException,
IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesPairSuccess");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
History createdHistory = historiesClient.create(history);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdSinglePaired);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService
.setupSinglePairSubmissionInDatabaseDifferentSample(1L, 2L, pairSequenceFiles1A, pairSequenceFiles2A,
sequenceFilePath3, referenceFilePath, validWorkflowIdSinglePaired);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
PreparedWorkflowGalaxy preparedWorkflow = analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
assertEquals("the response history id should match the input history id", createdHistory.getId(),
preparedWorkflow.getRemoteAnalysisId());
WorkflowInputsGalaxy workflowInputsGalaxy = preparedWorkflow.getWorkflowInputs();
assertNotNull("the returned workflow inputs should not be null", workflowInputsGalaxy);
// verify correct files have been uploaded
List<HistoryContents> historyContents = historiesClient.showHistoryContents(createdHistory.getId());
assertEquals("the created history has an invalid number of elements", 6, historyContents.size());
Map<String, HistoryContents> contentsMap = historyContentsAsMap(historyContents);
assertTrue("the created history should contain the file " + sequenceFilePathA.toFile().getName(),
contentsMap.containsKey(sequenceFilePathA.toFile().getName()));
assertTrue("the created history should contain the file " + sequenceFilePath2A.toFile().getName(),
contentsMap.containsKey(sequenceFilePath2A.toFile().getName()));
assertTrue("the created history should contain the file " + sequenceFilePath3.toFile().getName(),
contentsMap.containsKey(sequenceFilePath3.toFile().getName()));
assertTrue("the created history should contain the file " + referenceFilePath.toFile().getName(),
contentsMap.containsKey(referenceFilePath.toFile().getName()));
assertTrue("the created history should contain a dataset collection with the name " + INPUTS_SINGLE_NAME,
contentsMap.containsKey(INPUTS_SINGLE_NAME));
assertTrue("the created history should contain a dataset collection with the name " + INPUTS_PAIRED_NAME,
contentsMap.containsKey(INPUTS_PAIRED_NAME));
// make sure workflow inputs contains correct information
Map<String, WorkflowInput> workflowInputsMap = preparedWorkflow.getWorkflowInputs().getInputsObject()
.getInputs();
assertEquals("the created workflow inputs has an invalid number of elements", 3, workflowInputsMap.size());
}
/**
* Tests out failing to prepare paired and single workflow input files for
* execution (duplicate samples among single and paired input files).
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test(expected = SampleAnalysisDuplicateException.class)
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesSinglePairFail() throws InterruptedException, ExecutionManagerException,
IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesSinglePairFail");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
History createdHistory = historiesClient.create(history);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdSinglePaired);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService
.setupSinglePairSubmissionInDatabaseSameSample(1L, pairSequenceFiles1A, pairSequenceFiles2A,
sequenceFilePath3, referenceFilePath, validWorkflowIdSinglePaired);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
}
/**
* Tests out failure to prepare workflow input files for execution.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IOException
* @throws IridaWorkflowException
*/
@Test(expected = WorkflowException.class)
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testPrepareAnalysisFilesFail() throws InterruptedException, ExecutionManagerException,
IOException, IridaWorkflowException {
History history = new History();
history.setName("testPrepareAnalysisFilesFail");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
History createdHistory = historiesClient.create(history);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupSubmissionInDatabase(1L,
sequenceFilePathA, referenceFilePath, validWorkflowIdSingle);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId("invalid");
analysisWorkspaceService.prepareAnalysisFiles(analysisSubmission);
}
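	/**
	 * Uploads a file to the given Galaxy history under the given dataset name.
	 *
	 * @param filePath    the local file to upload
	 * @param fileName    the dataset name to give the uploaded file
	 * @param historyId   the id of the destination Galaxy history
	 * @param toolsClient the Galaxy tools client used to perform the upload
	 */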
private void uploadFileToHistory(Path filePath, String fileName, String historyId, ToolsClient toolsClient) {
ToolsClient.FileUploadRequest uploadRequest = new ToolsClient.FileUploadRequest(historyId, filePath.toFile());
uploadRequest.setDatasetName(fileName);
toolsClient.upload(uploadRequest);
}
/**
* Tests out successfully getting results for an analysis (TestAnalysis)
* consisting only of single end sequence reads.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IridaWorkflowNotFoundException
* @throws IOException
* @throws IridaWorkflowAnalysisTypeException
* @throws TimeoutException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testGetAnalysisResultsTestAnalysisSingleSuccess() throws InterruptedException,
ExecutionManagerException, IridaWorkflowNotFoundException, IOException, IridaWorkflowAnalysisTypeException,
TimeoutException {
History history = new History();
history.setName("testGetAnalysisResultsTestAnalysisSingleSuccess");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
ToolsClient toolsClient = localGalaxy.getGalaxyInstanceAdmin().getToolsClient();
History createdHistory = historiesClient.create(history);
// upload test outputs
uploadFileToHistory(sequenceFilePathA, OUTPUT1_NAME, createdHistory.getId(), toolsClient);
uploadFileToHistory(sequenceFilePathA, OUTPUT2_NAME, createdHistory.getId(), toolsClient);
// wait for history
Util.waitUntilHistoryComplete(createdHistory.getId(), galaxyHistoriesService, 60);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdSingle);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupSubmissionInDatabase(1L,
sequenceFilePathA, referenceFilePath, validWorkflowIdSingle);
assertEquals("the created submission should have no paired input files", 0, analysisSubmission
.getPairedInputFiles().size());
Set<SingleEndSequenceFile> submittedSf = analysisSubmission.getInputFilesSingleEnd();
assertEquals("the created submission should have 1 single input file", 1, submittedSf.size());
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
analysisSubmission.setAnalysisState(AnalysisState.COMPLETING);
analysisSubmissionRepository.save(analysisSubmission);
Analysis analysis = analysisWorkspaceService.getAnalysisResults(analysisSubmission);
assertNotNull("the analysis results were not properly created", analysis);
assertEquals("the Analysis results class is invalid", Analysis.class, analysis.getClass());
assertEquals("the analysis results has an invalid number of output files", 2, analysis.getAnalysisOutputFiles()
.size());
assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT1_NAME), analysis
.getAnalysisOutputFile(OUTPUT1_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", OUTPUT1_NAME, analysis
.getAnalysisOutputFile(OUTPUT1_KEY).getLabel());
assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT2_NAME), analysis
.getAnalysisOutputFile(OUTPUT2_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", OUTPUT2_NAME, analysis
.getAnalysisOutputFile(OUTPUT2_KEY).getLabel());
}
/**
* Tests out successfully getting results for an analysis (TestAnalysis)
	 * consisting only of single end sequence reads (for a workflow accepting a single sample).
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IridaWorkflowNotFoundException
* @throws IOException
* @throws IridaWorkflowAnalysisTypeException
* @throws TimeoutException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testGetAnalysisResultsTestAnalysisSingleSingleSampleSuccess() throws InterruptedException,
ExecutionManagerException, IridaWorkflowNotFoundException, IOException, IridaWorkflowAnalysisTypeException,
TimeoutException {
History history = new History();
history.setName("testGetAnalysisResultsTestAnalysisSingleSuccess");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
ToolsClient toolsClient = localGalaxy.getGalaxyInstanceAdmin().getToolsClient();
History createdHistory = historiesClient.create(history);
// upload test outputs
uploadFileToHistory(sequenceFilePathA, OUTPUT1_NAME, createdHistory.getId(), toolsClient);
uploadFileToHistory(sequenceFilePathA, OUTPUT2_NAME, createdHistory.getId(), toolsClient);
// wait for history
Util.waitUntilHistoryComplete(createdHistory.getId(), galaxyHistoriesService, 60);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdSingleSingleSample);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupSubmissionInDatabase(1L,
sequenceFilePathA, referenceFilePath, validWorkflowIdSingleSingleSample);
assertEquals("the created submission should have no paired input files", 0, analysisSubmission
.getPairedInputFiles().size());
Set<SingleEndSequenceFile> submittedSf = analysisSubmission.getInputFilesSingleEnd();
assertEquals("the created submission should have 1 single input file", 1, submittedSf.size());
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
analysisSubmission.setAnalysisState(AnalysisState.COMPLETING);
analysisSubmissionRepository.save(analysisSubmission);
Analysis analysis = analysisWorkspaceService.getAnalysisResults(analysisSubmission);
assertNotNull("the analysis results were not properly created", analysis);
assertEquals("the Analysis results class is invalid", Analysis.class, analysis.getClass());
assertEquals("the analysis results has an invalid number of output files", 2, analysis.getAnalysisOutputFiles()
.size());
assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT1_NAME), analysis
.getAnalysisOutputFile(OUTPUT1_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", SAMPLE1_NAME + '-' + OUTPUT1_NAME, analysis
.getAnalysisOutputFile(OUTPUT1_KEY).getLabel());
assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT2_NAME), analysis
.getAnalysisOutputFile(OUTPUT2_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", SAMPLE1_NAME + '-' + OUTPUT2_NAME, analysis
.getAnalysisOutputFile(OUTPUT2_KEY).getLabel());
}
/**
* Tests out successfully getting results for an analysis (TestAnalysis)
* consisting only of paired sequence reads.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IridaWorkflowNotFoundException
* @throws IOException
* @throws IridaWorkflowAnalysisTypeException
* @throws TimeoutException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testGetAnalysisResultsTestAnalysisPairedSuccess() throws InterruptedException,
ExecutionManagerException, IridaWorkflowNotFoundException, IOException, IridaWorkflowAnalysisTypeException,
TimeoutException {
History history = new History();
history.setName("testGetAnalysisResultsTestAnalysisPairedSuccess");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
ToolsClient toolsClient = localGalaxy.getGalaxyInstanceAdmin().getToolsClient();
History createdHistory = historiesClient.create(history);
// upload test outputs
uploadFileToHistory(sequenceFilePathA, OUTPUT1_NAME, createdHistory.getId(), toolsClient);
uploadFileToHistory(sequenceFilePathA, OUTPUT2_NAME, createdHistory.getId(), toolsClient);
// wait for history
Util.waitUntilHistoryComplete(createdHistory.getId(), galaxyHistoriesService, 60);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdPaired);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
List<Path> paths1 = new ArrayList<>();
paths1.add(sequenceFilePathA);
List<Path> paths2 = new ArrayList<>();
paths2.add(sequenceFilePath2A);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L,
paths1, paths2, referenceFilePath, validWorkflowIdPaired);
assertEquals("the created submission should have no single input files", 0, analysisSubmission
.getInputFilesSingleEnd().size());
Set<SequenceFilePair> pairedFiles = analysisSubmission.getPairedInputFiles();
assertEquals("the created submission has an invalid number of paired input files", 1, pairedFiles.size());
SequenceFilePair submittedSp = pairedFiles.iterator().next();
Set<SequenceFile> submittedSf = submittedSp.getFiles();
assertEquals("the paired input should have 2 files", 2, submittedSf.size());
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
analysisSubmission.setAnalysisState(AnalysisState.COMPLETING);
analysisSubmissionRepository.save(analysisSubmission);
Analysis analysis = analysisWorkspaceService.getAnalysisResults(analysisSubmission);
assertNotNull("the analysis results were not properly created", analysis);
assertEquals("the Analysis results class is invalid", Analysis.class, analysis.getClass());
assertEquals("the analysis results has an invalid number of output files", 2, analysis.getAnalysisOutputFiles()
.size());
assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT1_NAME), analysis
.getAnalysisOutputFile(OUTPUT1_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", OUTPUT1_NAME, analysis
.getAnalysisOutputFile(OUTPUT1_KEY).getLabel());
assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT2_NAME), analysis
.getAnalysisOutputFile(OUTPUT2_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", OUTPUT2_NAME, analysis
.getAnalysisOutputFile(OUTPUT2_KEY).getLabel());
}
/**
	 * Tests out successfully getting results for an analysis (TestAnalysis)
	 * consisting only of paired sequence reads, run with a workflow that
	 * writes its outputs for a single sample (so the output labels are
	 * prefixed with the sample name).
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IridaWorkflowNotFoundException
* @throws IOException
* @throws IridaWorkflowAnalysisTypeException
* @throws TimeoutException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testGetAnalysisResultsTestAnalysisPairedSingleSampleSuccess() throws InterruptedException,
ExecutionManagerException, IridaWorkflowNotFoundException, IOException, IridaWorkflowAnalysisTypeException,
TimeoutException {
History history = new History();
history.setName("testGetAnalysisResultsTestAnalysisPairedSingleSampleSuccess");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
ToolsClient toolsClient = localGalaxy.getGalaxyInstanceAdmin().getToolsClient();
History createdHistory = historiesClient.create(history);
// upload test outputs
uploadFileToHistory(sequenceFilePathA, OUTPUT1_NAME, createdHistory.getId(), toolsClient);
uploadFileToHistory(sequenceFilePathA, OUTPUT2_NAME, createdHistory.getId(), toolsClient);
// wait for history
Util.waitUntilHistoryComplete(createdHistory.getId(), galaxyHistoriesService, 60);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdPairedSingleSample);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
List<Path> paths1 = new ArrayList<>();
paths1.add(sequenceFilePathA);
List<Path> paths2 = new ArrayList<>();
paths2.add(sequenceFilePath2A);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L,
paths1, paths2, referenceFilePath, validWorkflowIdPairedSingleSample);
assertEquals("the created submission should have no single input files", 0, analysisSubmission
.getInputFilesSingleEnd().size());
Set<SequenceFilePair> pairedFiles = analysisSubmission.getPairedInputFiles();
assertEquals("the created submission has an invalid number of paired input files", 1, pairedFiles.size());
SequenceFilePair submittedSp = pairedFiles.iterator().next();
Set<SequenceFile> submittedSf = submittedSp.getFiles();
assertEquals("the paired input should have 2 files", 2, submittedSf.size());
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
analysisSubmission.setAnalysisState(AnalysisState.COMPLETING);
analysisSubmissionRepository.save(analysisSubmission);
Analysis analysis = analysisWorkspaceService.getAnalysisResults(analysisSubmission);
assertNotNull("the analysis results were not properly created", analysis);
assertEquals("the Analysis results class is invalid", Analysis.class, analysis.getClass());
assertEquals("the analysis results has an invalid number of output files", 2, analysis.getAnalysisOutputFiles()
.size());
assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT1_NAME), analysis
.getAnalysisOutputFile(OUTPUT1_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", SAMPLE1_NAME + "-" + OUTPUT1_NAME, analysis
.getAnalysisOutputFile(OUTPUT1_KEY).getLabel());
assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT2_NAME), analysis
.getAnalysisOutputFile(OUTPUT2_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", SAMPLE1_NAME + "-" + OUTPUT2_NAME, analysis
.getAnalysisOutputFile(OUTPUT2_KEY).getLabel());
}
/**
* Tests out successfully getting results for an analysis (TestAnalysis)
	 * when sequencing objects are present but the sample was deleted while the pipeline was running.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IridaWorkflowNotFoundException
* @throws IOException
* @throws IridaWorkflowAnalysisTypeException
* @throws TimeoutException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testGetAnalysisResultsTestAnalysisDeleteSampleRunningSuccess() throws InterruptedException,
ExecutionManagerException, IridaWorkflowNotFoundException, IOException, IridaWorkflowAnalysisTypeException,
TimeoutException {
History history = new History();
history.setName("testGetAnalysisResultsTestAnalysisDeleteSampleRunningSuccess");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
ToolsClient toolsClient = localGalaxy.getGalaxyInstanceAdmin().getToolsClient();
History createdHistory = historiesClient.create(history);
// upload test outputs
uploadFileToHistory(sequenceFilePathA, OUTPUT1_NAME, createdHistory.getId(), toolsClient);
uploadFileToHistory(sequenceFilePathA, OUTPUT2_NAME, createdHistory.getId(), toolsClient);
// wait for history
Util.waitUntilHistoryComplete(createdHistory.getId(), galaxyHistoriesService, 60);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdPairedSingleSample);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
List<Path> paths1 = new ArrayList<>();
paths1.add(sequenceFilePathA);
List<Path> paths2 = new ArrayList<>();
paths2.add(sequenceFilePath2A);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L,
paths1, paths2, referenceFilePath, validWorkflowIdPairedSingleSample);
sampleRepository.delete(1L);
		assertTrue("the sample should no longer exist", !sampleService.exists(1L));
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
analysisSubmission.setAnalysisState(AnalysisState.COMPLETING);
analysisSubmissionRepository.save(analysisSubmission);
Analysis analysis = analysisWorkspaceService.getAnalysisResults(analysisSubmission);
assertNotNull("the analysis results were not properly created", analysis);
assertEquals("the Analysis results class is invalid", Analysis.class, analysis.getClass());
assertEquals("the analysis results has an invalid number of output files", 2, analysis.getAnalysisOutputFiles()
.size());
assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT1_NAME), analysis
.getAnalysisOutputFile(OUTPUT1_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", OUTPUT1_NAME, analysis
.getAnalysisOutputFile(OUTPUT1_KEY).getLabel());
assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT2_NAME), analysis
.getAnalysisOutputFile(OUTPUT2_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", OUTPUT2_NAME, analysis
.getAnalysisOutputFile(OUTPUT2_KEY).getLabel());
}
/**
* Tests out successfully getting results for an analysis (TestAnalysis)
* consisting of both single and paired sequence reads.
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IridaWorkflowNotFoundException
* @throws IOException
* @throws IridaWorkflowAnalysisTypeException
* @throws TimeoutException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testGetAnalysisResultsTestAnalysisSinglePairedSuccess() throws InterruptedException,
ExecutionManagerException, IridaWorkflowNotFoundException, IOException, IridaWorkflowAnalysisTypeException,
TimeoutException {
History history = new History();
history.setName("testGetAnalysisResultsTestAnalysisSinglePairedSuccess");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
ToolsClient toolsClient = localGalaxy.getGalaxyInstanceAdmin().getToolsClient();
History createdHistory = historiesClient.create(history);
// upload test outputs
uploadFileToHistory(sequenceFilePathA, OUTPUT1_NAME, createdHistory.getId(), toolsClient);
uploadFileToHistory(sequenceFilePathA, OUTPUT2_NAME, createdHistory.getId(), toolsClient);
// wait for history
Util.waitUntilHistoryComplete(createdHistory.getId(), galaxyHistoriesService, 60);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdSinglePaired);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
List<Path> paths1 = new ArrayList<>();
paths1.add(sequenceFilePathA);
List<Path> paths2 = new ArrayList<>();
paths2.add(sequenceFilePath2A);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService
.setupSinglePairSubmissionInDatabaseSameSample(1L, paths1, paths2, sequenceFilePath3,
referenceFilePath, validWorkflowIdSinglePaired);
Set<SingleEndSequenceFile> singleFiles = analysisSubmission.getInputFilesSingleEnd();
assertEquals("invalid number of single end input files", 1, singleFiles.size());
Set<SequenceFilePair> pairedFiles = analysisSubmission.getPairedInputFiles();
assertEquals("invalid number of paired end inputs", 1, pairedFiles.size());
SequenceFilePair submittedSp = pairedFiles.iterator().next();
Set<SequenceFile> submittedSf = submittedSp.getFiles();
assertEquals("invalid number of files for paired input", 2, submittedSf.size());
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
analysisSubmission.setAnalysisState(AnalysisState.COMPLETING);
analysisSubmissionRepository.save(analysisSubmission);
Analysis analysis = analysisWorkspaceService.getAnalysisResults(analysisSubmission);
assertNotNull("the analysis results were not properly created", analysis);
assertEquals("the Analysis results class is invalid", Analysis.class, analysis.getClass());
assertEquals("the analysis results has an invalid number of output files", 2, analysis.getAnalysisOutputFiles()
.size());
assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT1_NAME), analysis
.getAnalysisOutputFile(OUTPUT1_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", OUTPUT1_NAME, analysis
.getAnalysisOutputFile(OUTPUT1_KEY).getLabel());
assertEquals("the analysis results output file has an invalid name", Paths.get(OUTPUT2_NAME), analysis
.getAnalysisOutputFile(OUTPUT2_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", OUTPUT2_NAME, analysis
.getAnalysisOutputFile(OUTPUT2_KEY).getLabel());
}
/**
* Tests out successfully getting results for an analysis (phylogenomics).
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IridaWorkflowNotFoundException
* @throws IOException
* @throws IridaWorkflowAnalysisTypeException
* @throws TimeoutException
*/
@Test
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testGetAnalysisResultsPhylogenomicsSuccess() throws InterruptedException, ExecutionManagerException,
IridaWorkflowNotFoundException, IOException, IridaWorkflowAnalysisTypeException, TimeoutException {
History history = new History();
history.setName("testGetAnalysisResultsPhylogenomicsSuccess");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
ToolsClient toolsClient = localGalaxy.getGalaxyInstanceAdmin().getToolsClient();
History createdHistory = historiesClient.create(history);
// upload test outputs
uploadFileToHistory(sequenceFilePathA, TABLE_NAME, createdHistory.getId(), toolsClient);
uploadFileToHistory(sequenceFilePathA, MATRIX_NAME, createdHistory.getId(), toolsClient);
uploadFileToHistory(sequenceFilePathA, TREE_NAME, createdHistory.getId(), toolsClient);
// wait for history
Util.waitUntilHistoryComplete(createdHistory.getId(), galaxyHistoriesService, 60);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(phylogenomicsWorkflowId);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupSubmissionInDatabase(1L,
sequenceFilePathA, referenceFilePath, phylogenomicsWorkflowId);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
analysisSubmission.setAnalysisState(AnalysisState.COMPLETING);
analysisSubmissionRepository.save(analysisSubmission);
Analysis analysis = analysisWorkspaceService.getAnalysisResults(analysisSubmission);
assertNotNull("the analysis results were not properly created", analysis);
assertEquals("the Analysis results class is invalid", AnalysisPhylogenomicsPipeline.class, analysis.getClass());
assertEquals("the analysis results has an invalid number of output files", 3, analysis.getAnalysisOutputFiles()
.size());
assertEquals("the analysis results output file has an invalid name", Paths.get(TABLE_NAME), analysis
.getAnalysisOutputFile(TABLE_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", TABLE_NAME, analysis
.getAnalysisOutputFile(TABLE_KEY).getLabel());
assertEquals("the analysis results output file has an invalid name", Paths.get(MATRIX_NAME), analysis
.getAnalysisOutputFile(MATRIX_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", MATRIX_NAME, analysis
.getAnalysisOutputFile(MATRIX_KEY).getLabel());
assertEquals("the analysis results output file has an invalid name", Paths.get(TREE_NAME), analysis
.getAnalysisOutputFile(TREE_KEY).getFile().getFileName());
assertEquals("the analysis results output file has an invalid label", TREE_NAME, analysis
.getAnalysisOutputFile(TREE_KEY).getLabel());
}
/**
* Tests out failing to get results for an analysis (missing output file).
*
* @throws InterruptedException
* @throws ExecutionManagerException
* @throws IridaWorkflowNotFoundException
* @throws IOException
* @throws IridaWorkflowAnalysisTypeException
* @throws TimeoutException
*/
@Test(expected = GalaxyDatasetNotFoundException.class)
@WithMockUser(username = "aaron", roles = "ADMIN")
public void testGetAnalysisResultsTestAnalysisFail() throws InterruptedException, ExecutionManagerException,
IridaWorkflowNotFoundException, IOException, IridaWorkflowAnalysisTypeException, TimeoutException {
History history = new History();
history.setName("testGetAnalysisResultsTestAnalysisFail");
HistoriesClient historiesClient = localGalaxy.getGalaxyInstanceAdmin().getHistoriesClient();
WorkflowsClient workflowsClient = localGalaxy.getGalaxyInstanceAdmin().getWorkflowsClient();
ToolsClient toolsClient = localGalaxy.getGalaxyInstanceAdmin().getToolsClient();
History createdHistory = historiesClient.create(history);
// upload test outputs
uploadFileToHistory(sequenceFilePathA, OUTPUT1_NAME, createdHistory.getId(), toolsClient);
// wait for history
Util.waitUntilHistoryComplete(createdHistory.getId(), galaxyHistoriesService, 60);
IridaWorkflow iridaWorkflow = iridaWorkflowsService.getIridaWorkflow(validWorkflowIdSingle);
Path workflowPath = iridaWorkflow.getWorkflowStructure().getWorkflowFile();
String workflowString = new String(Files.readAllBytes(workflowPath), StandardCharsets.UTF_8);
Workflow galaxyWorkflow = workflowsClient.importWorkflow(workflowString);
AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupSubmissionInDatabase(1L,
sequenceFilePathA, referenceFilePath, validWorkflowIdSingle);
analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
analysisSubmission.setRemoteWorkflowId(galaxyWorkflow.getId());
analysisSubmission.setAnalysisState(AnalysisState.COMPLETING);
analysisSubmissionRepository.save(analysisSubmission);
analysisWorkspaceService.getAnalysisResults(analysisSubmission);
}
}
| refactored out all use of analysissubmission input getters
| src/test/java/ca/corefacility/bioinformatics/irida/service/analysis/workspace/galaxy/impl/integration/AnalysisWorkspaceServiceGalaxyIT.java | refactored out all use of analysissubmission input getters | <ide><path>src/test/java/ca/corefacility/bioinformatics/irida/service/analysis/workspace/galaxy/impl/integration/AnalysisWorkspaceServiceGalaxyIT.java
<ide> import ca.corefacility.bioinformatics.irida.repositories.analysis.submission.AnalysisSubmissionRepository;
<ide> import ca.corefacility.bioinformatics.irida.repositories.sample.SampleRepository;
<ide> import ca.corefacility.bioinformatics.irida.service.DatabaseSetupGalaxyITService;
<add>import ca.corefacility.bioinformatics.irida.service.SequencingObjectService;
<ide> import ca.corefacility.bioinformatics.irida.service.analysis.workspace.galaxy.AnalysisWorkspaceServiceGalaxy;
<ide> import ca.corefacility.bioinformatics.irida.service.sample.SampleService;
<ide> import ca.corefacility.bioinformatics.irida.service.workflow.IridaWorkflowsService;
<ide>
<ide> @Autowired
<ide> private SampleRepository sampleRepository;
<add>
<add> @Autowired
<add> private SequencingObjectService sequencingObjectService;
<ide>
<ide> @Autowired
<ide> @Qualifier("rootTempDirectory")
<ide>
<ide> AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupSubmissionInDatabase(1L,
<ide> sequenceFilePathA, referenceFilePath, validWorkflowIdSingle);
<del> assertEquals("the created submission should have no paired input files", 0, analysisSubmission
<del> .getPairedInputFiles().size());
<del> Set<SingleEndSequenceFile> submittedSf = analysisSubmission.getInputFilesSingleEnd();
<add>
<add> Set<SingleEndSequenceFile> submittedSf = sequencingObjectService
<add> .getSequencingObjectsOfTypeForAnalysisSubmission(analysisSubmission, SingleEndSequenceFile.class);
<add> Set<SequenceFilePair> pairedFiles = sequencingObjectService
<add> .getSequencingObjectsOfTypeForAnalysisSubmission(analysisSubmission, SequenceFilePair.class);
<add> assertEquals("the created submission should have no paired input files", 0, pairedFiles.size());
<ide> assertEquals("the created submission should have 1 single input file", 1, submittedSf.size());
<ide>
<ide> analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
<ide>
<ide> AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupSubmissionInDatabase(1L,
<ide> sequenceFilePathA, referenceFilePath, validWorkflowIdSingleSingleSample);
<del> assertEquals("the created submission should have no paired input files", 0, analysisSubmission
<del> .getPairedInputFiles().size());
<del> Set<SingleEndSequenceFile> submittedSf = analysisSubmission.getInputFilesSingleEnd();
<add>
<add> Set<SingleEndSequenceFile> submittedSf = sequencingObjectService
<add> .getSequencingObjectsOfTypeForAnalysisSubmission(analysisSubmission, SingleEndSequenceFile.class);
<add> Set<SequenceFilePair> pairedFiles = sequencingObjectService
<add> .getSequencingObjectsOfTypeForAnalysisSubmission(analysisSubmission, SequenceFilePair.class);
<add> assertEquals("the created submission should have no paired input files", 0, pairedFiles.size());
<ide> assertEquals("the created submission should have 1 single input file", 1, submittedSf.size());
<ide>
<ide> analysisSubmission.setRemoteAnalysisId(createdHistory.getId());
<ide>
<ide> AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L,
<ide> paths1, paths2, referenceFilePath, validWorkflowIdPaired);
<del> assertEquals("the created submission should have no single input files", 0, analysisSubmission
<del> .getInputFilesSingleEnd().size());
<del> Set<SequenceFilePair> pairedFiles = analysisSubmission.getPairedInputFiles();
<add>
<add> Set<SingleEndSequenceFile> submittedSingleFiles = sequencingObjectService
<add> .getSequencingObjectsOfTypeForAnalysisSubmission(analysisSubmission, SingleEndSequenceFile.class);
<add> Set<SequenceFilePair> pairedFiles = sequencingObjectService
<add> .getSequencingObjectsOfTypeForAnalysisSubmission(analysisSubmission, SequenceFilePair.class);
<add>
<add> assertEquals("the created submission should have no single input files", 0, submittedSingleFiles.size());
<ide> assertEquals("the created submission has an invalid number of paired input files", 1, pairedFiles.size());
<ide> SequenceFilePair submittedSp = pairedFiles.iterator().next();
<ide> Set<SequenceFile> submittedSf = submittedSp.getFiles();
<ide>
<ide> AnalysisSubmission analysisSubmission = analysisExecutionGalaxyITService.setupPairSubmissionInDatabase(1L,
<ide> paths1, paths2, referenceFilePath, validWorkflowIdPairedSingleSample);
<del> assertEquals("the created submission should have no single input files", 0, analysisSubmission
<del> .getInputFilesSingleEnd().size());
<del> Set<SequenceFilePair> pairedFiles = analysisSubmission.getPairedInputFiles();
<add>
<add> Set<SingleEndSequenceFile> submittedSingleFiles = sequencingObjectService
<add> .getSequencingObjectsOfTypeForAnalysisSubmission(analysisSubmission, SingleEndSequenceFile.class);
<add> Set<SequenceFilePair> pairedFiles = sequencingObjectService
<add> .getSequencingObjectsOfTypeForAnalysisSubmission(analysisSubmission, SequenceFilePair.class);
<add>
<add> assertEquals("the created submission should have no single input files", 0, submittedSingleFiles.size());
<ide> assertEquals("the created submission has an invalid number of paired input files", 1, pairedFiles.size());
<ide> SequenceFilePair submittedSp = pairedFiles.iterator().next();
<ide> Set<SequenceFile> submittedSf = submittedSp.getFiles();
<ide> .setupSinglePairSubmissionInDatabaseSameSample(1L, paths1, paths2, sequenceFilePath3,
<ide> referenceFilePath, validWorkflowIdSinglePaired);
<ide>
<del> Set<SingleEndSequenceFile> singleFiles = analysisSubmission.getInputFilesSingleEnd();
<add> Set<SingleEndSequenceFile> singleFiles = sequencingObjectService
<add> .getSequencingObjectsOfTypeForAnalysisSubmission(analysisSubmission, SingleEndSequenceFile.class);
<add> Set<SequenceFilePair> pairedFiles = sequencingObjectService
<add> .getSequencingObjectsOfTypeForAnalysisSubmission(analysisSubmission, SequenceFilePair.class);
<add>
<ide> assertEquals("invalid number of single end input files", 1, singleFiles.size());
<del> Set<SequenceFilePair> pairedFiles = analysisSubmission.getPairedInputFiles();
<ide> assertEquals("invalid number of paired end inputs", 1, pairedFiles.size());
<ide> SequenceFilePair submittedSp = pairedFiles.iterator().next();
<ide> Set<SequenceFile> submittedSf = submittedSp.getFiles(); |
|
Java | mit | 403ceb6551bee775679900112ef60ca6465bd20e | 0 | lemmy/tlaplus,tlaplus/tlaplus,lemmy/tlaplus,lemmy/tlaplus,lemmy/tlaplus,tlaplus/tlaplus,tlaplus/tlaplus,tlaplus/tlaplus | package org.lamport.tla.toolbox.ui.property;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.jface.preference.StringFieldEditor;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.lamport.tla.toolbox.spec.Spec;
import org.lamport.tla.toolbox.util.IHelpConstants;
import org.lamport.tla.toolbox.util.UIHelper;
import org.lamport.tla.toolbox.util.pref.IPreferenceConstants;
import org.lamport.tla.toolbox.util.pref.PreferenceStoreHelper;
/**
* Represents specification properties
* @author Simon Zambrovski
* @version $Id$
*/
public class SpecPropertyPage extends GenericFieldEditorPropertyPage
{
private StringFieldEditor pcalParamEditor;
protected Control createContents(Composite parent)
{
Control control = super.createContents(parent);
// ensure the page has no special buttons
noDefaultAndApplyButton();
UIHelper.setHelp(control, IHelpConstants.SPEC_PROPERTY_PAGE);
return control;
}
public void createFieldEditors(Composite composite)
{
// TODO change root file
StringFieldEditor rootFileEditor = new StringFieldEditor(IPreferenceConstants.P_PROJECT_ROOT_FILE,
"Specification root module", composite);
addEditor(rootFileEditor);
rootFileEditor.getTextControl(composite).setEditable(false);
pcalParamEditor = new StringFieldEditor(IPreferenceConstants.PCAL_CAL_PARAMS, "PCal call arguments", composite);
addEditor(pcalParamEditor);
}
protected IPreferenceStore doGetPreferenceStore()
{
Spec spec = (Spec) getElement();
return PreferenceStoreHelper.getProjectPreferenceStore(spec.getProject());
}
}
| org.lamport.tla.toolbox/src/org/lamport/tla/toolbox/ui/property/SpecPropertyPage.java | package org.lamport.tla.toolbox.ui.property;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.jface.preference.StringFieldEditor;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.lamport.tla.toolbox.spec.Spec;
import org.lamport.tla.toolbox.util.IHelpConstants;
import org.lamport.tla.toolbox.util.UIHelper;
import org.lamport.tla.toolbox.util.pref.IPreferenceConstants;
import org.lamport.tla.toolbox.util.pref.PreferenceStoreHelper;
/**
* Represents specification properties
* @author Simon Zambrovski
* @version $Id$
*/
public class SpecPropertyPage extends GenericFieldEditorPropertyPage
{
protected Control createContents(Composite parent)
{
Control control = super.createContents(parent);
UIHelper.setHelp(control, IHelpConstants.SPEC_PROPERTY_PAGE);
return control;
}
public void createFieldEditors(Composite composite)
{
StringFieldEditor rootFileEditor = new StringFieldEditor(IPreferenceConstants.P_PROJECT_ROOT_FILE,
"Specification root module", composite);
addEditor(rootFileEditor);
}
protected IPreferenceStore doGetPreferenceStore()
{
Spec spec = (Spec) getElement();
return PreferenceStoreHelper.getProjectPreferenceStore(spec.getProject());
}
}
| property changed
git-svn-id: 7acc490bd371dbc82047a939b87dc892fdc31f59@13304 76a6fc44-f60b-0410-a9a8-e67b0e8fc65c
| org.lamport.tla.toolbox/src/org/lamport/tla/toolbox/ui/property/SpecPropertyPage.java | property changed | <ide><path>org.lamport.tla.toolbox/src/org/lamport/tla/toolbox/ui/property/SpecPropertyPage.java
<ide> */
<ide> public class SpecPropertyPage extends GenericFieldEditorPropertyPage
<ide> {
<del>
<add> private StringFieldEditor pcalParamEditor;
<ide>
<ide> protected Control createContents(Composite parent)
<ide> {
<ide> Control control = super.createContents(parent);
<add>
<add> // ensure the page has no special buttons
<add> noDefaultAndApplyButton();
<add>
<ide> UIHelper.setHelp(control, IHelpConstants.SPEC_PROPERTY_PAGE);
<ide> return control;
<ide> }
<ide>
<ide> public void createFieldEditors(Composite composite)
<ide> {
<add> // TODO change root file
<ide> StringFieldEditor rootFileEditor = new StringFieldEditor(IPreferenceConstants.P_PROJECT_ROOT_FILE,
<ide> "Specification root module", composite);
<ide> addEditor(rootFileEditor);
<add> rootFileEditor.getTextControl(composite).setEditable(false);
<add>
<add> pcalParamEditor = new StringFieldEditor(IPreferenceConstants.PCAL_CAL_PARAMS, "PCal call arguments", composite);
<add> addEditor(pcalParamEditor);
<add>
<ide> }
<ide>
<ide> |
|
JavaScript | mit | 8318612ecc12372034fdbddb5b3cc96ec9d33db5 | 0 | ghiringh/Wegas,Heigvd/Wegas,Heigvd/Wegas,Heigvd/Wegas,Heigvd/Wegas,Heigvd/Wegas,ghiringh/Wegas,ghiringh/Wegas | /*
* Wegas
* http://wegas.albasim.ch
*
* Copyright (c) 2013, 2014, 2015 School of Business and Engineering Vaud, Comem
* Licensed under the MIT License
*/
/**
* @fileoverview
* @author Francois-Xavier Aeberhard <[email protected]>
*/
YUI.add("wegas-button", function(Y) {
"use strict";
var CONTENTBOX = 'contentBox',
BOUNDINGBOX = 'boundingBox',
Wegas = Y.Wegas, Button, ToggleButton, MarkAsUnread;
/**
* @name Y.Wegas.Button
* @extends Y.Button
* @borrows Y.WidgetChild, Y.Wegas.Widget, Y.Wegas.Editable
* @class Custom Button implementation.
* @constructor
* @description Custom Button implementation. Adds Y.WidgetChild and
* Y.Wegas.Widget extensions to the original Y.Button
*/
Button = Y.Base.create("button", Y.Button, [Y.WidgetChild, Wegas.Widget, Wegas.Editable], {
/** @lends Y.Wegas.Button# */
// *** Private fields *** //
// *** Lifecycle Methods *** //
/**
* @function
* @private
         * @description Sets variables to their initial values.
         * Plugs the tooltip and adds the CSS class given via ATTRS to the
         * content box.
*/
initializer: function() {
Button.superclass.initializer.apply(this, arguments);
this.publish("click", {
emitFacade: true,
bubbles: true,
defaultFn: function() { // Force event activation by default
}
});
//this.constructor.CSS_PREFIX = "yui3-button"; // Revert changes done by Y.Wegas.Widget so styling will work
this._cssPrefix = "yui3-button";
if (this.get("cssClass")) {
this.get(CONTENTBOX).addClass(this.get("cssClass"));
}
if (this.get("tooltip")) {
this.plug(Y.Plugin.Tooltip, {
content: Y.Template.Micro.compile(this.get("tooltip"))()
});
}
},
getEditorLabel: function() {
return Wegas.Helper.stripHtml(this.get("label"));
},
/**
* @function
* @private
* @description Call widget parent to execute its proper render function.
* add "wegas-button" class to bounding box.
*/
renderUI: function() {
Button.superclass.renderUI.apply(this, arguments);
this.get(BOUNDINGBOX).addClass("wegas-button");
},
_getLabel: function(value) {
return value;
}
}, {
/**
* @lends Y.Wegas.Button
*/
EDITORNAME: "Button",
/**
* @field
* @static
* @description
* <p><strong>Attributes</strong></p>
* <ul>
* <li>label: the label of the button</li>
* <li>data: the data used by the button</li>
* <li>tooltip: the tooltip of the button</li>
* <li>disabled: boolean to choose state of the button</li>
* <li>cssClass: cssClass of the button</li>
         * <li>plugins: impacts to bind to the button</li>
* </ul>
*/
ATTRS: {
label: {
type: "string",
optional: true
},
labelHTML: {
"transient": true
},
data: {},
tooltip: {
type: "string",
optional: true,
"transient": true
},
disabled: {
type: "boolean",
_inputex: {
wrapperClassName: 'inputEx-fieldWrapper wegas-advanced-feature'
}
},
cssClass: {
value: null
}
}
});
Wegas.Button = Button;
/* @fixme @hack So we can display html tag inside a button */
Y.Button.prototype._setLabel = function(label, name, opts) {
if (!opts || opts.src !== 'internal') {
this.set('labelHTML', Y.Template.Micro.compile(label || "")(), {src: 'internal'});
}
return label;
};
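    /* With the override above, a label such as "<b>Stop</b>" is run through
     * Y.Template.Micro and pushed into labelHTML, so the markup is rendered
     * instead of being escaped by the stock Y.Button label handling. */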
/**
* Plugin which adds an unread message counter to a widget.
*
* @class Y.Wegas.UnreadCount
* @extends Y.Plugin.Base
* @borrows Y.Wegas.Plugin, Y.Wegas.Editable
*/
var UnreadCount = Y.Base.create("wegas-unreadCount", Y.Plugin.Base, [Wegas.Plugin, Wegas.Editable], {
/** @lends Y.Plugin.UnreadCount# */
// *** Private fields *** //
// *** Lifecycle methods *** //
/**
* @function
* @private
         * @description Sets variables to their initial values.
*/
initializer: function() {
var k;
this.handlers = {};
this._counters = {
"InboxDescriptor": function(descriptor, instance, resolve) {
resolve(instance.get("unreadCount"));
},
"DialogueDescriptor": function(descriptor, instance, resolve) {
var state = descriptor.getCurrentState();
if (!instance.get("enabled")) {
return false;
}
state.getAvailableActions(function(availableActions) {
resolve(availableActions.length > 0 ? 1 : 0);
});
},
"QuestionDescriptor": function(descriptor, instance, resolve) {
if (instance.get("replies")) {
resolve(instance.get("replies").length === 0 && instance.get("active") ? 1 : 0); // only count if it is active
}
resolve(0);
},
"PeerReviewDescriptor": function(descriptor, instance, resolve) {
var i, j, k, types = ["toReview", "reviewed"],
reviews, review,
counter = 0;
for (i = 0; i < 2; i++) {
reviews = instance.get(types[i]);
for (j = 0; j < reviews.length; j++) {
review = reviews[j];
if ((i === 0 && review.get("reviewState") === "DISPATCHED") ||
(i === 1 && review.get("reviewState") === "NOTIFIED")) {
counter++;
}
}
}
resolve(counter);
}
};
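            // "userCounters" entries are stringified functions with the same
            // (descriptor, instance, resolve) shape as the built-in counters
            // above; the eval below turns each one back into a callable counter.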
for (k in this.get("userCounters")) {
this._counters[k] = eval("(" + this.get("userCounters")[k] + ")");
}
this.bindUI();
},
/**
* @function
* @private
* @description bind function to events.
* When VariableDescriptorFacade is updated, do sync.
         * When the plugin's host is rendered, do sync.
*/
bindUI: function() {
this.handlers.update = Wegas.Facade.Variable.after("update", this.syncUI, this);
this.afterHostEvent("render", this.syncUI, this);
},
/**
* @function
* @private
         * @description calls updateCounter() to refresh the number of
         * unread items shown on the host.
*/
syncUI: function() {
this.updateCounter();
},
setCounterValue: function(unreadCount) {
var bb = this.get('host').get(BOUNDINGBOX),
target = bb.one(".wegas-unreadcount");
if (!target) { // If the counter span has not been rendered, do it
bb.append('<span class="wegas-unreadcount"></span>');
target = bb.one(".wegas-unreadcount");
}
if (unreadCount > 0) { // Update the content
target.setContent("<span class='value'>" + (this.get("displayValue") ? unreadCount : "") + "</span>");
} else {
target.setContent("");
}
bb.toggleClass("wegas-unreadcount", unreadCount > 0);
},
/**
* @function
* @private
* @description Detach all functions created by this widget
*/
destructor: function() {
for (var k in this.handlers) {
this.handlers[k].detach();
}
},
// *** Private methods *** //
/**
* @function
* @private
         * @return 0 when no variable is set; otherwise the count is computed
         * asynchronously and handed to setCounterValue().
         * @description Count the number of unread items in the given variable.
*/
updateCounter: function() {
            var i, items, klass,
list = this.get('variable.evaluated'), descriptor, context = this, promises = [];
if (!list) {
return 0;
}
if (!Y.Lang.isArray(list)) {
list = [list];
}
descriptor = list.pop();
while (descriptor) {
klass = descriptor.get("@class");
if (klass === "ListDescriptor") {
items = descriptor.flatten();
for (i = 0; i < items.length; i = i + 1) {
list.push(items[i]);
}
} else {
if (this._counters[klass]) {
promises.push(new Y.Promise(function(resolve, reject) {
context._counters[klass](descriptor, descriptor.getInstance(), function(count) {
resolve(count);
});
}));
}
}
descriptor = list.pop();
}
Y.Promise.all(promises).then(function(allCounts) {
var total = 0, i;
for (i = 0; i < allCounts.length; i += 1) {
total += allCounts[i];
}
context.setCounterValue(total);
});
}
}, {
NS: "UnreadCount",
NAME: "UnreadCount",
/**
* @lends Y.Plugin.UnreadCount
*/
/**
* @field
* @static
* @description
* <p><strong>Attributes</strong></p>
* <ul>
* <li>variable: The target variable, returned either based on the name
* attribute, and if absent by evaluating the expr attribute.</li>
* </ul>
*/
ATTRS: {
/**
* The target variable, returned either based on the variableName attribute,
* and if absent by evaluating the expr attribute.
*/
variable: {
getter: Wegas.Widget.VARIABLEDESCRIPTORGETTER,
_inputex: {
_type: "variableselect",
label: "Unread count",
classFilter: ["ListDescriptor", "InboxDescriptor"]
}
},
displayValue: {
type: "boolean",
optional: true,
value: true
},
userCounters: {
type: "object",
value: {},
optional: true,
_inputex: {
type: "hidden"
}
}
}
});
Y.Plugin.UnreadCount = UnreadCount;
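    /* Usage sketch (hypothetical configuration, not taken from this file):
     * a widget could plug the counter and register a custom counter for an
     * additional descriptor type through "userCounters", e.g.
     *
     *   widget.plug(Y.Plugin.UnreadCount, {
     *       variable: {name: "inbox"},
     *       displayValue: true,
     *       userCounters: {
     *           "TaskDescriptor": "function(descriptor, instance, resolve) {" +
     *               " resolve(instance.get('active') ? 1 : 0); }"
     *       }
     *   });
     *
     * "TaskDescriptor" and the attribute values are illustrative only.
     */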
MarkAsUnread = Y.Base.create("wegas-mark-as-unread", Y.Plugin.UnreadCount, [], {
setCounterValue: function(unreadCount) {
if (unreadCount > 0) {
if (this.get("host") instanceof Y.Node) {
this.get("host").addClass("unread");
} else if (this.get("host") instanceof Y.Widget) {
this.get("host").get("boundingBox").addClass("unread");
} else {
Y.log("unread error...");
}
}
}
}, {
NS: "MarkAsUnread",
NAME: "MarkAsUnread",
ATTRS: {
}
});
Y.Plugin.MarkAsUnread = MarkAsUnread;
/**
* @name Y.Wegas.OpenPageButton
* @extends Y.Wegas.Button
* @class Shortcut to create a Button with an OpenPageAction plugin
* @constructor
* @description Shortcut to create a Button with an OpenPageAction plugin
*/
Wegas.OpenPageButton = Y.Base.create("button", Wegas.Button, [], {
/** @lends Y.Wegas.OpenPageButton# */
/**
* @function
* @private
* @param cfg
* @description plug the plugin "OpenPageAction" with a given
* configuration.
*/
initializer: function(cfg) {
this.plug(Y.Plugin.OpenPageAction, cfg);
}
});
Wegas.ToggleButton = Y.Base.create("button", Y.ToggleButton, [Y.WidgetChild, Wegas.Widget, Wegas.Editable], {}, {});
});
| wegas-app/src/main/webapp/wegas-app/js/widget/wegas-button.js | /*
* Wegas
* http://wegas.albasim.ch
*
* Copyright (c) 2013, 2014, 2015 School of Business and Engineering Vaud, Comem
* Licensed under the MIT License
*/
/**
* @fileoverview
* @author Francois-Xavier Aeberhard <[email protected]>
*/
YUI.add("wegas-button", function(Y) {
"use strict";
var CONTENTBOX = 'contentBox',
BOUNDINGBOX = 'boundingBox',
Wegas = Y.Wegas, Button, ToggleButton, MarkAsUnread;
/**
* @name Y.Wegas.Button
* @extends Y.Button
* @borrows Y.WidgetChild, Y.Wegas.Widget, Y.Wegas.Editable
* @class Custom Button implementation.
* @constructor
* @description Custom Button implementation. Adds Y.WidgetChild and
* Y.Wegas.Widget extensions to the original Y.Button
*/
Button = Y.Base.create("button", Y.Button, [Y.WidgetChild, Wegas.Widget, Wegas.Editable], {
/** @lends Y.Wegas.Button# */
// *** Private fields *** //
// *** Lifecycle Methods *** //
/**
* @function
* @private
         * @description Sets variables to their initial values.
         * Plugs the tooltip and adds the CSS class given via ATTRS to the
         * content box.
*/
initializer: function() {
Button.superclass.initializer.apply(this, arguments);
this.publish("click", {
emitFacade: true,
bubbles: true,
defaultFn: function() { // Force event activation by default
}
});
//this.constructor.CSS_PREFIX = "yui3-button"; // Revert changes done by Y.Wegas.Widget so styling will work
this._cssPrefix = "yui3-button";
if (this.get("cssClass")) {
this.get(CONTENTBOX).addClass(this.get("cssClass"));
}
if (this.get("tooltip")) {
this.plug(Y.Plugin.Tooltip, {
content: Y.Template.Micro.compile(this.get("tooltip"))()
});
}
},
getEditorLabel: function() {
return Wegas.Helper.stripHtml(this.get("label"));
},
/**
* @function
* @private
* @description Call widget parent to execute its proper render function.
* add "wegas-button" class to bounding box.
*/
renderUI: function() {
Button.superclass.renderUI.apply(this, arguments);
this.get(BOUNDINGBOX).addClass("wegas-button");
},
_getLabel: function(value) {
return value;
}
}, {
/**
* @lends Y.Wegas.Button
*/
EDITORNAME: "Button",
/**
* @field
* @static
* @description
* <p><strong>Attributes</strong></p>
* <ul>
* <li>label: the label of the button</li>
* <li>data: the data used by the button</li>
* <li>tooltip: the tooltip of the button</li>
* <li>disabled: boolean to choose state of the button</li>
* <li>cssClass: cssClass of the button</li>
         * <li>plugins: impacts to bind to the button</li>
* </ul>
*/
ATTRS: {
label: {
type: "string",
optional: true
},
labelHTML: {
"transient": true
},
data: {},
tooltip: {
type: "string",
optional: true,
"transient": true
},
disabled: {
type: "boolean",
_inputex: {
wrapperClassName: 'inputEx-fieldWrapper wegas-advanced-feature'
}
},
cssClass: {
value: null
}
}
});
Wegas.Button = Button;
/* @fixme @hack So we can display html tag inside a button */
Y.Button.prototype._setLabel = function(label, name, opts) {
if (!opts || opts.src !== 'internal') {
this.set('labelHTML', Y.Template.Micro.compile(label || "")(), {src: 'internal'});
}
return label;
};
/**
* Plugin which adds an unread message counter to a widget.
*
* @class Y.Wegas.UnreadCount
* @extends Y.Plugin.Base
* @borrows Y.Wegas.Plugin, Y.Wegas.Editable
*/
var UnreadCount = Y.Base.create("wegas-unreadCount", Y.Plugin.Base, [Wegas.Plugin, Wegas.Editable], {
/** @lends Y.Plugin.UnreadCount# */
// *** Private fields *** //
// *** Lifecycle methods *** //
/**
* @function
* @private
         * @description Sets variables to their initial values.
*/
initializer: function() {
var k;
this.handlers = {};
this._counters = {
"InboxDescriptor": function(descriptor, instance, resolve) {
resolve(instance.get("unreadCount"));
},
"DialogueDescriptor": function(descriptor, instance, resolve) {
var state = descriptor.getCurrentState();
state.getAvailableActions(function(availableActions) {
resolve(availableActions.length > 0 ? 1 : 0);
});
},
"QuestionDescriptor": function(descriptor, instance, resolve) {
if (instance.get("replies")) {
resolve(instance.get("replies").length === 0 && instance.get("active") ? 1 : 0); // only count if it is active
}
resolve(0);
},
"PeerReviewDescriptor": function(descriptor, instance, resolve) {
var i, j, k, types = ["toReview", "reviewed"],
reviews, review,
counter = 0;
for (i = 0; i < 2; i++) {
reviews = instance.get(types[i]);
for (j = 0; j < reviews.length; j++) {
review = reviews[j];
if ((i === 0 && review.get("reviewState") === "DISPATCHED") ||
(i === 1 && review.get("reviewState") === "NOTIFIED")) {
counter++;
}
}
}
resolve(counter);
}
};
for (k in this.get("userCounters")) {
this._counters[k] = eval("(" + this.get("userCounters")[k] + ")");
}
this.bindUI();
},
/**
* @function
* @private
* @description bind function to events.
* When VariableDescriptorFacade is updated, do sync.
         * When the plugin's host is rendered, do sync.
*/
bindUI: function() {
this.handlers.update = Wegas.Facade.Variable.after("update", this.syncUI, this);
this.afterHostEvent("render", this.syncUI, this);
},
/**
* @function
* @private
         * @description calls updateCounter() to refresh the number of
         * unread items shown on the host.
*/
syncUI: function() {
this.updateCounter();
},
setCounterValue: function(unreadCount) {
var bb = this.get('host').get(BOUNDINGBOX),
target = bb.one(".wegas-unreadcount");
if (!target) { // If the counter span has not been rendered, do it
bb.append('<span class="wegas-unreadcount"></span>');
target = bb.one(".wegas-unreadcount");
}
if (unreadCount > 0) { // Update the content
target.setContent("<span class='value'>" + unreadCount + "</span>");
} else {
target.setContent("");
}
bb.toggleClass("wegas-unreadcount", unreadCount > 0);
},
/**
* @function
* @private
* @description Detach all functions created by this widget
*/
destructor: function() {
for (var k in this.handlers) {
this.handlers[k].detach();
}
},
// *** Private methods *** //
/**
* @function
* @private
         * @return 0 when no variable is set; otherwise the count is computed
         * asynchronously and handed to setCounterValue().
         * @description Count the number of unread items in the given variable.
*/
updateCounter: function() {
            var i, items, klass,
list = this.get('variable.evaluated'), descriptor, context = this, promises = [];
if (!list) {
return 0;
}
if (!Y.Lang.isArray(list)) {
list = [list];
}
descriptor = list.pop();
while (descriptor) {
klass = descriptor.get("@class");
if (klass === "ListDescriptor") {
items = descriptor.flatten();
for (i = 0; i < items.length; i = i + 1) {
list.push(items[i]);
}
} else {
if (this._counters[klass]) {
promises.push(new Y.Promise(function(resolve, reject) {
context._counters[klass](descriptor, descriptor.getInstance(), function(count) {
resolve(count);
});
}));
}
}
descriptor = list.pop();
}
Y.Promise.all(promises).then(function(allCounts) {
var total = 0, i;
for (i = 0; i < allCounts.length; i += 1) {
total += allCounts[i];
}
context.setCounterValue(total);
});
}
}, {
NS: "UnreadCount",
NAME: "UnreadCount",
/**
* @lends Y.Plugin.UnreadCount
*/
/**
* @field
* @static
* @description
* <p><strong>Attributes</strong></p>
* <ul>
* <li>variable: The target variable, returned either based on the name
* attribute, and if absent by evaluating the expr attribute.</li>
* </ul>
*/
ATTRS: {
/**
* The target variable, returned either based on the variableName attribute,
* and if absent by evaluating the expr attribute.
*/
variable: {
getter: Wegas.Widget.VARIABLEDESCRIPTORGETTER,
_inputex: {
_type: "variableselect",
label: "Unread count",
classFilter: ["ListDescriptor", "InboxDescriptor"]
}
},
userCounters: {
type: "object",
value: {},
optional: true,
_inputex: {
type: "hidden"
}
}
}
});
Y.Plugin.UnreadCount = UnreadCount;
MarkAsUnread = Y.Base.create("wegas-mark-as-unread", Y.Plugin.UnreadCount, [], {
setCounterValue: function(unreadCount) {
if (unreadCount > 0) {
if (this.get("host") instanceof Y.Node){
this.get("host").addClass("unread");
} else if (this.get("host") instanceof Y.Widget){
this.get("host").get("boundingBox").addClass("unread");
} else {
Y.log("unread error...");
}
}
}
}, {
NS: "MarkAsUnread",
NAME: "MarkAsUnread",
ATTRS: {
}
});
Y.Plugin.MarkAsUnread = MarkAsUnread;
/**
* @name Y.Wegas.OpenPageButton
* @extends Y.Wegas.Button
* @class Shortcut to create a Button with an OpenPageAction plugin
* @constructor
* @description Shortcut to create a Button with an OpenPageAction plugin
*/
Wegas.OpenPageButton = Y.Base.create("button", Wegas.Button, [], {
/** @lends Y.Wegas.OpenPageButton# */
/**
* @function
* @private
* @param cfg
* @description plug the plugin "OpenPageAction" with a given
* configuration.
*/
initializer: function(cfg) {
this.plug(Y.Plugin.OpenPageAction, cfg);
}
});
Wegas.ToggleButton = Y.Base.create("button", Y.ToggleButton, [Y.WidgetChild, Wegas.Widget, Wegas.Editable], {}, {});
});
| UnreadCount
* Fix Dialogue Unread count
* add option to hide the value | wegas-app/src/main/webapp/wegas-app/js/widget/wegas-button.js | UnreadCount | <ide><path>wegas-app/src/main/webapp/wegas-app/js/widget/wegas-button.js
<ide> },
<ide> "DialogueDescriptor": function(descriptor, instance, resolve) {
<ide> var state = descriptor.getCurrentState();
<add>                if (!instance.get("enabled")) {
<add>                    resolve(0); // resolve with 0 (instead of bailing out) so Y.Promise.all can settle
<add>                    return;
<add>                }
<ide> state.getAvailableActions(function(availableActions) {
<ide> resolve(availableActions.length > 0 ? 1 : 0);
<ide> });
<ide> }
<ide>
<ide> if (unreadCount > 0) { // Update the content
<del> target.setContent("<span class='value'>" + unreadCount + "</span>");
<add> target.setContent("<span class='value'>" + (this.get("displayValue") ? unreadCount : "") + "</span>");
<ide> } else {
<ide> target.setContent("");
<ide> }
<ide> classFilter: ["ListDescriptor", "InboxDescriptor"]
<ide> }
<ide> },
<add> displayValue: {
<add> type: "boolean",
<add> optional: true,
<add> value: true
<add> },
<ide> userCounters: {
<ide> type: "object",
<ide> value: {},
<ide> MarkAsUnread = Y.Base.create("wegas-mark-as-unread", Y.Plugin.UnreadCount, [], {
<ide> setCounterValue: function(unreadCount) {
<ide> if (unreadCount > 0) {
<del> if (this.get("host") instanceof Y.Node){
<add> if (this.get("host") instanceof Y.Node) {
<ide> this.get("host").addClass("unread");
<del> } else if (this.get("host") instanceof Y.Widget){
<add> } else if (this.get("host") instanceof Y.Widget) {
<ide> this.get("host").get("boundingBox").addClass("unread");
<ide> } else {
<del> Y.log("unread error...");
<add> Y.log("unread error...");
<ide> }
<ide> }
<ide> } |
|
Java | apache-2.0 | 8d0c51dc9b6dac00158ad659465e489a76fa72cd | 0 | irfanah/jmeter,kschroeder/jmeter,vherilier/jmeter,ubikfsabbe/jmeter,max3163/jmeter,max3163/jmeter,tuanhq/jmeter,tuanhq/jmeter,d0k1/jmeter,liwangbest/jmeter,ubikfsabbe/jmeter,vherilier/jmeter,thomsonreuters/jmeter,ra0077/jmeter,DoctorQ/jmeter,irfanah/jmeter,etnetera/jmeter,vherilier/jmeter,ubikloadpack/jmeter,ubikloadpack/jmeter,hizhangqi/jmeter-1,hemikak/jmeter,tuanhq/jmeter,ThiagoGarciaAlves/jmeter,d0k1/jmeter,d0k1/jmeter,vherilier/jmeter,etnetera/jmeter,hemikak/jmeter,kschroeder/jmeter,hemikak/jmeter,DoctorQ/jmeter,ra0077/jmeter,etnetera/jmeter,ubikloadpack/jmeter,ubikfsabbe/jmeter,ThiagoGarciaAlves/jmeter,ThiagoGarciaAlves/jmeter,thomsonreuters/jmeter,hemikak/jmeter,ubikfsabbe/jmeter,kyroskoh/jmeter,kyroskoh/jmeter,d0k1/jmeter,thomsonreuters/jmeter,fj11/jmeter,max3163/jmeter,liwangbest/jmeter,etnetera/jmeter,hizhangqi/jmeter-1,hizhangqi/jmeter-1,ra0077/jmeter,ubikloadpack/jmeter,DoctorQ/jmeter,fj11/jmeter,etnetera/jmeter,max3163/jmeter,irfanah/jmeter,fj11/jmeter,liwangbest/jmeter,kschroeder/jmeter,kyroskoh/jmeter,ra0077/jmeter | // $Header$
/*
* Copyright 2003-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter.extractor;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import junit.framework.TestCase;
import org.apache.jmeter.processor.PostProcessor;
import org.apache.jmeter.samplers.SampleResult;
import org.apache.jmeter.testelement.AbstractTestElement;
import org.apache.jmeter.testelement.property.IntegerProperty;
import org.apache.jmeter.threads.JMeterContext;
import org.apache.jmeter.threads.JMeterContextService;
import org.apache.jmeter.threads.JMeterVariables;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.logging.LoggingManager;
import org.apache.log.Logger;
import org.apache.oro.text.MalformedCachePatternException;
import org.apache.oro.text.PatternCacheLRU;
import org.apache.oro.text.regex.MatchResult;
import org.apache.oro.text.regex.Pattern;
import org.apache.oro.text.regex.PatternMatcher;
import org.apache.oro.text.regex.PatternMatcherInput;
import org.apache.oro.text.regex.Perl5Compiler;
import org.apache.oro.text.regex.Perl5Matcher;
import org.apache.oro.text.regex.Util;
/**
* @version $Revision$
*/
public class RegexExtractor
extends AbstractTestElement
implements PostProcessor, Serializable
{
transient private static Logger log = LoggingManager.getLoggerForClass();
public static final String REGEX = "RegexExtractor.regex";
public static final String REFNAME = "RegexExtractor.refname";
public static final String MATCH_NUMBER = "RegexExtractor.match_number";
public static final String DEFAULT = "RegexExtractor.default";
public static final String TEMPLATE = "RegexExtractor.template";
private Object[] template = null;
private static PatternCacheLRU patternCache =
new PatternCacheLRU(1000, new Perl5Compiler());
private static ThreadLocal localMatcher = new ThreadLocal()
{
protected Object initialValue()
{
return new Perl5Matcher();
}
};
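    // Note: Perl5Matcher is not thread-safe, so each thread gets its own
    // matcher instance through the ThreadLocal above rather than sharing one.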
/**
     * Parses the response data using regular expressions and saves the results
* into variables for use later in the test.
* @see org.apache.jmeter.processor.PostProcessor#process()
*/
public void process()
{
initTemplate();
JMeterContext context = JMeterContextService.getContext();
if (context.getPreviousResult() == null
|| context.getPreviousResult().getResponseData() == null)
{
return;
}
log.debug("RegexExtractor processing result");
// Fetch some variables
JMeterVariables vars = context.getVariables();
String refName = getRefName();
int matchNumber = getMatchNumber();
vars.put(refName, getDefaultValue());
Perl5Matcher matcher = (Perl5Matcher) localMatcher.get();
PatternMatcherInput input =
new PatternMatcherInput(
new String(context.getPreviousResult().getResponseData()));
log.debug("Regex = " + getRegex());
try {
Pattern pattern =
patternCache.getPattern(getRegex(), Perl5Compiler.READ_ONLY_MASK);
List matches = new ArrayList();
int x = 0;
boolean done = false;
do
{
if (matcher.contains(input, pattern))
{
log.debug("RegexExtractor: Match found!");
matches.add(matcher.getMatch());
}
else
{
done = true;
}
x++;
}
while (x != matchNumber && !done);
try
{
MatchResult match;
if (matchNumber >= 0){// Original match behaviour
match = getCorrectMatch(matches, matchNumber);
if (match != null)
{
vars.put(refName, generateResult(match));
saveGroups(vars, refName, match);
}
}
else // < 0 means we save all the matches
{
int prevCount = 0;
String prevString=(String)vars.get(refName+"_matchNr");
if (prevString != null)
{
try
{
prevCount = Integer.parseInt(prevString);
}
                        catch (NumberFormatException e1)
                        {
                            log.warn("Could not parse previous match count: "
                                + prevString, e1);
                        }
}
vars.put(refName+"_matchNr", ""+matches.size());// Save the count
for (int i=1;i<=matches.size();i++) {
match = getCorrectMatch(matches, i);
if (match != null)
{
vars.put(refName+"_"+i, generateResult(match));
saveGroups(vars, refName+"_"+i, match);
}
}
for (int i = matches.size()+1;i<=prevCount;i++)
{
vars.remove(refName+"_"+i);
vars.remove(refName+"_"+i+"_g0");// Remove known groups ...
vars.remove(refName+"_"+i+"_g1");// ...
//TODO remove other groups if present?
}
}
}
catch (RuntimeException e)
{
log.warn("Error while generating result");
}
} catch (MalformedCachePatternException e) {
log.warn("Error in pattern: "+ getRegex());
}
}
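    // Sketch of what process() leaves in the variables (assuming refName is
    // "regVal"): with matchNumber >= 0 a single result is stored under regVal
    // plus its groups regVal_g0, regVal_g1, ...; with a negative matchNumber
    // every match i is stored as regVal_i (with regVal_i_g0, ...) and
    // regVal_matchNr holds the total, stale entries from a previous sample
    // being removed.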
private void saveGroups(
JMeterVariables vars,
String basename,
MatchResult match)
{
StringBuffer buf = new StringBuffer();
for (int x = 0; x < match.groups(); x++)
{
buf.append(basename);
buf.append("_g");
buf.append(x);
vars.put(buf.toString(), match.group(x));
buf.setLength(0);
}
}
public Object clone()
{
RegexExtractor cloned = (RegexExtractor) super.clone();
cloned.template = this.template;
return cloned;
}
private String generateResult(MatchResult match)
{
StringBuffer result = new StringBuffer();
for (int a = 0; a < template.length; a++)
{
log.debug(
"RegexExtractor: Template piece #" + a + " = " + template[a]);
if (template[a] instanceof String)
{
result.append(template[a]);
}
else
{
result.append(match.group(((Integer) template[a]).intValue()));
}
}
log.debug("Regex Extractor result = " + result.toString());
return result.toString();
}
private void initTemplate()
{
if (template != null)
{
return;
}
List pieces = new ArrayList();
List combined = new LinkedList();
String rawTemplate = getTemplate();
PatternMatcher matcher = (Perl5Matcher) localMatcher.get();
Pattern templatePattern =
patternCache.getPattern(
"\\$(\\d+)\\$",
                Perl5Compiler.READ_ONLY_MASK | Perl5Compiler.SINGLELINE_MASK); // masks combine with OR; '&' of distinct bits gives 0
log.debug("Pattern = " + templatePattern);
log.debug("template = " + rawTemplate);
Util.split(pieces, matcher, templatePattern, rawTemplate);
PatternMatcherInput input = new PatternMatcherInput(rawTemplate);
Iterator iter = pieces.iterator();
boolean startsWith = isFirstElementGroup(rawTemplate);
log.debug(
"template split into "
+ pieces.size()
+ " pieces, starts with = "
+ startsWith);
while (iter.hasNext())
{
boolean matchExists = matcher.contains(input, templatePattern);
if (startsWith)
{
if (matchExists)
{
combined.add(new Integer(matcher.getMatch().group(1)));
}
combined.add(iter.next());
}
else
{
combined.add(iter.next());
if (matchExists)
{
combined.add(new Integer(matcher.getMatch().group(1)));
}
}
}
if (matcher.contains(input, templatePattern))
{
log.debug("Template does end with template pattern");
combined.add(new Integer(matcher.getMatch().group(1)));
}
template = combined.toArray();
}
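    // Illustrative walk-through (hypothetical template; the exact pieces depend
    // on Util.split): "ID=$1$;$2$" is split into the literal pieces "ID=" and
    // ";" and recombined as roughly ["ID=", Integer(1), ";", Integer(2)], which
    // generateResult() then interpolates with the corresponding match groups.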
private boolean isFirstElementGroup(String rawData)
{
try
{
Pattern pattern =
patternCache.getPattern(
"^\\$\\d+\\$",
                    Perl5Compiler.READ_ONLY_MASK
                        | Perl5Compiler.SINGLELINE_MASK); // combine flags with OR, not AND
return ((Perl5Matcher) localMatcher.get()).contains(
rawData,
pattern);
}
catch (RuntimeException e)
{
log.error("", e);
return false;
}
}
/**
* Grab the appropriate result from the list.
* @param matches list of matches
* @param entry the entry number in the list
* @return MatchResult
*/
private MatchResult getCorrectMatch(List matches, int entry)
{
int matchSize = matches.size();
if (matchSize <= 0 || entry > matchSize) return null;
if (entry == 0) // Random match
{
return (MatchResult) matches.get(
JMeterUtils.getRandomInt(matchSize));
}
return (MatchResult) matches.get(entry - 1);
}
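    // For example (illustrative values), with matches == [m1, m2, m3]:
    //   getCorrectMatch(matches, 2) -> m2 (entries are 1-based)
    //   getCorrectMatch(matches, 0) -> one of m1..m3, chosen at random
    //   getCorrectMatch(matches, 4) -> null (past the end of the list)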
public void setRegex(String regex)
{
setProperty(REGEX, regex);
}
public String getRegex()
{
return getPropertyAsString(REGEX);
}
public void setRefName(String refName)
{
setProperty(REFNAME, refName);
}
public String getRefName()
{
return getPropertyAsString(REFNAME);
}
/**
     * Set which Match to use. This can be any positive number, indicating the
     * exact match to use, 0, which is interpreted as meaning random, or a
     * negative number, which means that all matches are saved.
     * @param matchNumber the match number to use
*/
public void setMatchNumber(int matchNumber)
{
setProperty(new IntegerProperty(MATCH_NUMBER, matchNumber));
}
public int getMatchNumber()
{
return getPropertyAsInt(MATCH_NUMBER);
}
/**
* Sets the value of the variable if no matches are found
* @param defaultValue
*/
public void setDefaultValue(String defaultValue)
{
setProperty(DEFAULT, defaultValue);
}
public String getDefaultValue()
{
return getPropertyAsString(DEFAULT);
}
public void setTemplate(String template)
{
setProperty(TEMPLATE, template);
}
public String getTemplate()
{
return getPropertyAsString(TEMPLATE);
}
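    /*
     * A minimal programmatic usage sketch (names and values are made up; in a
     * real test plan these properties come from the GUI):
     *
     *   RegexExtractor re = new RegexExtractor();
     *   re.setRefName("price");
     *   re.setRegex("<b>(\\d+)</b>");
     *   re.setTemplate("$1$");
     *   re.setMatchNumber(1);            // first match; 0 = random, -1 = all
     *   re.setDefaultValue("NOT_FOUND");
     *   re.process();                    // reads the previous SampleResult
     */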
public static class Test extends TestCase
{
RegexExtractor extractor;
SampleResult result;
JMeterVariables vars;
public Test(String name)
{
super(name);
}
public void setUp()
{
extractor = new RegexExtractor();
extractor.setRefName("regVal");
result = new SampleResult();
String data =
"<company-xmlext-query-ret>" +
"<row>" +
"<value field=\"RetCode\">LIS_OK</value>" +
"<value field=\"RetCodeExtension\"></value>" +
"<value field=\"alias\"></value>" +
"<value field=\"positioncount\"></value>" +
"<value field=\"invalidpincount\">0</value>" +
"<value field=\"pinposition1\">1</value>" +
"<value field=\"pinpositionvalue1\"></value>" +
"<value field=\"pinposition2\">5</value>" +
"<value field=\"pinpositionvalue2\"></value>" +
"<value field=\"pinposition3\">6</value>" +
"<value field=\"pinpositionvalue3\"></value>" +
"</row>" +
"</company-xmlext-query-ret>";
result.setResponseData(data.getBytes());
vars = new JMeterVariables();
JMeterContextService.getContext().setVariables(vars);
JMeterContextService.getContext().setPreviousResult(result);
}
public void testVariableExtraction() throws Exception
{
extractor.setRegex(
"<value field=\"(pinposition\\d+)\">(\\d+)</value>");
extractor.setTemplate("$2$");
extractor.setMatchNumber(2);
extractor.process();
assertEquals("5", vars.get("regVal"));
assertEquals("pinposition2", vars.get("regVal_g1"));
assertEquals("5", vars.get("regVal_g2"));
assertEquals("<value field=\"pinposition2\">5</value>", vars.get("regVal_g0"));
}
public void testVariableExtraction2() throws Exception
{
extractor.setRegex(
"<value field=\"(pinposition\\d+)\">(\\d+)</value>");
extractor.setTemplate("$1$");
extractor.setMatchNumber(3);
extractor.process();
assertEquals("pinposition3", vars.get("regVal"));
}
public void testVariableExtraction6() throws Exception
{
extractor.setRegex(
"<value field=\"(pinposition\\d+)\">(\\d+)</value>");
extractor.setTemplate("$2$");
extractor.setMatchNumber(4);
extractor.setDefaultValue("default");
extractor.process();
assertEquals("default", vars.get("regVal"));
}
public void testVariableExtraction3() throws Exception
{
extractor.setRegex(
"<value field=\"(pinposition\\d+)\">(\\d+)</value>");
extractor.setTemplate("_$1$");
extractor.setMatchNumber(2);
extractor.process();
assertEquals("_pinposition2", vars.get("regVal"));
}
public void testVariableExtraction5() throws Exception
{
extractor.setRegex(
"<value field=\"(pinposition\\d+)\">(\\d+)</value>");
extractor.setTemplate("$1$");
extractor.setMatchNumber(-1);
extractor.process();
assertEquals("3",vars.get("regVal_matchNr"));
assertEquals("pinposition1", vars.get("regVal_1"));
assertEquals("pinposition2", vars.get("regVal_2"));
assertEquals("pinposition3", vars.get("regVal_3"));
assertEquals("pinposition1", vars.get("regVal_1_g1"));
assertEquals("1", vars.get("regVal_1_g2"));
assertEquals("<value field=\"pinposition1\">1</value>", vars.get("regVal_1_g0"));
assertNull(vars.get("regVal_4"));
// Check old values don't hang around:
extractor.setRegex("(\\w+)count"); // fewer matches
extractor.process();
assertEquals("2",vars.get("regVal_matchNr"));
assertEquals("position", vars.get("regVal_1"));
assertEquals("invalidpin", vars.get("regVal_2"));
assertNull("Unused variables should be null",vars.get("regVal_3"));
assertNull("Unused variables should be null",vars.get("regVal_3_g0"));
assertNull("Unused variables should be null",vars.get("regVal_3_g1"));
}
}
}
| src/components/org/apache/jmeter/extractor/RegexExtractor.java | // $Header$
/*
* Copyright 2003-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter.extractor;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import junit.framework.TestCase;
import org.apache.jmeter.processor.PostProcessor;
import org.apache.jmeter.samplers.SampleResult;
import org.apache.jmeter.testelement.AbstractTestElement;
import org.apache.jmeter.testelement.property.IntegerProperty;
import org.apache.jmeter.threads.JMeterContext;
import org.apache.jmeter.threads.JMeterContextService;
import org.apache.jmeter.threads.JMeterVariables;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.logging.LoggingManager;
import org.apache.log.Logger;
import org.apache.oro.text.MalformedCachePatternException;
import org.apache.oro.text.PatternCacheLRU;
import org.apache.oro.text.regex.MatchResult;
import org.apache.oro.text.regex.Pattern;
import org.apache.oro.text.regex.PatternMatcher;
import org.apache.oro.text.regex.PatternMatcherInput;
import org.apache.oro.text.regex.Perl5Compiler;
import org.apache.oro.text.regex.Perl5Matcher;
import org.apache.oro.text.regex.Util;
/**
* @version $Revision$
*/
public class RegexExtractor
extends AbstractTestElement
implements PostProcessor, Serializable
{
transient private static Logger log = LoggingManager.getLoggerForClass();
public static final String REGEX = "RegexExtractor.regex";
public static final String REFNAME = "RegexExtractor.refname";
public static final String MATCH_NUMBER = "RegexExtractor.match_number";
public static final String DEFAULT = "RegexExtractor.default";
public static final String TEMPLATE = "RegexExtractor.template";
private Object[] template = null;
private static PatternCacheLRU patternCache =
new PatternCacheLRU(1000, new Perl5Compiler());
private static ThreadLocal localMatcher = new ThreadLocal()
{
protected Object initialValue()
{
return new Perl5Matcher();
}
};
/**
* Parses the response data using regular expressions and saving the results
* into variables for use later in the test.
* @see org.apache.jmeter.processor.PostProcessor#process()
*/
public void process()
{
initTemplate();
JMeterContext context = JMeterContextService.getContext();
if (context.getPreviousResult() == null
|| context.getPreviousResult().getResponseData() == null)
{
return;
}
log.debug("RegexExtractor processing result");
// Fetch some variables
JMeterVariables vars = context.getVariables();
String refName = getRefName();
int matchNumber = getMatchNumber();
vars.put(refName, getDefaultValue());
Perl5Matcher matcher = (Perl5Matcher) localMatcher.get();
PatternMatcherInput input =
new PatternMatcherInput(
new String(context.getPreviousResult().getResponseData()));
log.debug("Regex = " + getRegex());
try {
Pattern pattern =
patternCache.getPattern(getRegex(), Perl5Compiler.READ_ONLY_MASK);
List matches = new ArrayList();
int x = 0;
boolean done = false;
do
{
if (matcher.contains(input, pattern))
{
log.debug("RegexExtractor: Match found!");
matches.add(matcher.getMatch());
}
else
{
done = true;
}
x++;
}
while (x != matchNumber && !done);
try
{
MatchResult match;
if (matchNumber >= 0){// Original match behaviour
match = getCorrectMatch(matches, matchNumber);
if (match != null)
{
vars.put(refName, generateResult(match));
saveGroups(vars, refName, match);
}
}
else // < 0 means we save all the matches
{
vars.put(refName+"_matchNr", ""+matches.size());// Save the count
for (int i=1;i<=matches.size();i++) {
match = getCorrectMatch(matches, i);
if (match != null)
{
vars.put(refName+"_"+i, generateResult(match));
saveGroups(vars, refName+"_"+i, match);
}
}
}
}
catch (RuntimeException e)
{
log.warn("Error while generating result");
}
} catch (MalformedCachePatternException e) {
log.warn("Error in pattern: "+ getRegex());
}
}
private void saveGroups(
JMeterVariables vars,
String basename,
MatchResult match)
{
StringBuffer buf = new StringBuffer();
for (int x = 0; x < match.groups(); x++)
{
buf.append(basename);
buf.append("_g");
buf.append(x);
vars.put(buf.toString(), match.group(x));
buf.setLength(0);
}
}
public Object clone()
{
RegexExtractor cloned = (RegexExtractor) super.clone();
cloned.template = this.template;
return cloned;
}
private String generateResult(MatchResult match)
{
StringBuffer result = new StringBuffer();
for (int a = 0; a < template.length; a++)
{
log.debug(
"RegexExtractor: Template piece #" + a + " = " + template[a]);
if (template[a] instanceof String)
{
result.append(template[a]);
}
else
{
result.append(match.group(((Integer) template[a]).intValue()));
}
}
log.debug("Regex Extractor result = " + result.toString());
return result.toString();
}
private void initTemplate()
{
if (template != null)
{
return;
}
List pieces = new ArrayList();
List combined = new LinkedList();
String rawTemplate = getTemplate();
PatternMatcher matcher = (Perl5Matcher) localMatcher.get();
Pattern templatePattern =
patternCache.getPattern(
"\\$(\\d+)\\$",
Perl5Compiler.READ_ONLY_MASK & Perl5Compiler.SINGLELINE_MASK);
log.debug("Pattern = " + templatePattern);
log.debug("template = " + rawTemplate);
Util.split(pieces, matcher, templatePattern, rawTemplate);
PatternMatcherInput input = new PatternMatcherInput(rawTemplate);
Iterator iter = pieces.iterator();
boolean startsWith = isFirstElementGroup(rawTemplate);
log.debug(
"template split into "
+ pieces.size()
+ " pieces, starts with = "
+ startsWith);
while (iter.hasNext())
{
boolean matchExists = matcher.contains(input, templatePattern);
if (startsWith)
{
if (matchExists)
{
combined.add(new Integer(matcher.getMatch().group(1)));
}
combined.add(iter.next());
}
else
{
combined.add(iter.next());
if (matchExists)
{
combined.add(new Integer(matcher.getMatch().group(1)));
}
}
}
if (matcher.contains(input, templatePattern))
{
log.debug("Template does end with template pattern");
combined.add(new Integer(matcher.getMatch().group(1)));
}
template = combined.toArray();
}
private boolean isFirstElementGroup(String rawData)
{
try
{
Pattern pattern =
patternCache.getPattern(
"^\\$\\d+\\$",
                    Perl5Compiler.READ_ONLY_MASK
                        | Perl5Compiler.SINGLELINE_MASK); // combine flags with OR, not AND
return ((Perl5Matcher) localMatcher.get()).contains(
rawData,
pattern);
}
catch (RuntimeException e)
{
log.error("", e);
return false;
}
}
/**
* Grab the appropriate result from the list.
* @param matches list of matches
* @param entry the entry number in the list
* @return MatchResult
*/
private MatchResult getCorrectMatch(List matches, int entry)
{
int matchSize = matches.size();
if (matchSize <= 0 || entry > matchSize) return null;
if (entry == 0) // Random match
{
return (MatchResult) matches.get(
JMeterUtils.getRandomInt(matchSize));
}
return (MatchResult) matches.get(entry - 1);
}
public void setRegex(String regex)
{
setProperty(REGEX, regex);
}
public String getRegex()
{
return getPropertyAsString(REGEX);
}
public void setRefName(String refName)
{
setProperty(REFNAME, refName);
}
public String getRefName()
{
return getPropertyAsString(REFNAME);
}
/**
     * Set which Match to use. This can be any positive number, indicating the
     * exact match to use, 0, which is interpreted as meaning random, or a
     * negative number, which means that all matches are saved.
     * @param matchNumber the match number to use
*/
public void setMatchNumber(int matchNumber)
{
setProperty(new IntegerProperty(MATCH_NUMBER, matchNumber));
}
public int getMatchNumber()
{
return getPropertyAsInt(MATCH_NUMBER);
}
/**
* Sets the value of the variable if no matches are found
* @param defaultValue
*/
public void setDefaultValue(String defaultValue)
{
setProperty(DEFAULT, defaultValue);
}
public String getDefaultValue()
{
return getPropertyAsString(DEFAULT);
}
public void setTemplate(String template)
{
setProperty(TEMPLATE, template);
}
public String getTemplate()
{
return getPropertyAsString(TEMPLATE);
}
public static class Test extends TestCase
{
RegexExtractor extractor;
SampleResult result;
JMeterVariables vars;
public Test(String name)
{
super(name);
}
public void setUp()
{
extractor = new RegexExtractor();
extractor.setRefName("regVal");
result = new SampleResult();
String data =
"<company-xmlext-query-ret>" +
"<row>" +
"<value field=\"RetCode\">LIS_OK</value>" +
"<value field=\"RetCodeExtension\"></value>" +
"<value field=\"alias\"></value>" +
"<value field=\"positioncount\"></value>" +
"<value field=\"invalidpincount\">0</value>" +
"<value field=\"pinposition1\">1</value>" +
"<value field=\"pinpositionvalue1\"></value>" +
"<value field=\"pinposition2\">5</value>" +
"<value field=\"pinpositionvalue2\"></value>" +
"<value field=\"pinposition3\">6</value>" +
"<value field=\"pinpositionvalue3\"></value>" +
"</row>" +
"</company-xmlext-query-ret>";
result.setResponseData(data.getBytes());
vars = new JMeterVariables();
JMeterContextService.getContext().setVariables(vars);
JMeterContextService.getContext().setPreviousResult(result);
}
public void testVariableExtraction() throws Exception
{
extractor.setRegex(
"<value field=\"(pinposition\\d+)\">(\\d+)</value>");
extractor.setTemplate("$2$");
extractor.setMatchNumber(2);
extractor.process();
assertEquals("5", vars.get("regVal"));
assertEquals("pinposition2", vars.get("regVal_g1"));
assertEquals("5", vars.get("regVal_g2"));
assertEquals("<value field=\"pinposition2\">5</value>", vars.get("regVal_g0"));
}
public void testVariableExtraction2() throws Exception
{
extractor.setRegex(
"<value field=\"(pinposition\\d+)\">(\\d+)</value>");
extractor.setTemplate("$1$");
extractor.setMatchNumber(3);
extractor.process();
assertEquals("pinposition3", vars.get("regVal"));
}
public void testVariableExtraction6() throws Exception
{
extractor.setRegex(
"<value field=\"(pinposition\\d+)\">(\\d+)</value>");
extractor.setTemplate("$2$");
extractor.setMatchNumber(4);
extractor.setDefaultValue("default");
extractor.process();
assertEquals("default", vars.get("regVal"));
}
public void testVariableExtraction3() throws Exception
{
extractor.setRegex(
"<value field=\"(pinposition\\d+)\">(\\d+)</value>");
extractor.setTemplate("_$1$");
extractor.setMatchNumber(2);
extractor.process();
assertEquals("_pinposition2", vars.get("regVal"));
}
public void testVariableExtraction5() throws Exception
{
extractor.setRegex(
"<value field=\"(pinposition\\d+)\">(\\d+)</value>");
extractor.setTemplate("_$1$");
extractor.setMatchNumber(-1);
extractor.process();
assertEquals("3",vars.get("regVal_matchNr"));
assertEquals("_pinposition1", vars.get("regVal_1"));
assertEquals("_pinposition2", vars.get("regVal_2"));
assertEquals("_pinposition3", vars.get("regVal_3"));
assertEquals("pinposition1", vars.get("regVal_1_g1"));
assertEquals("1", vars.get("regVal_1_g2"));
assertEquals("<value field=\"pinposition1\">1</value>", vars.get("regVal_1_g0"));
}
}
}
| Remove old Regex variables when fewer matches found later in thread
git-svn-id: 7c053b8fbd1fb5868f764c6f9536fc6a9bbe7da9@324313 13f79535-47bb-0310-9956-ffa450edef68
| src/components/org/apache/jmeter/extractor/RegexExtractor.java | Remove old Regex variables when fewer matches found later in thread | <ide><path>rc/components/org/apache/jmeter/extractor/RegexExtractor.java
<ide> }
<ide> else // < 0 means we save all the matches
<ide> {
<add> int prevCount = 0;
<add> String prevString=(String)vars.get(refName+"_matchNr");
<add> if (prevString != null)
<add> {
<add> try
<add> {
<add> prevCount = Integer.parseInt(prevString);
<add> }
<add> catch (NumberFormatException e1)
<add> {
<add>                    // Could not parse the previously saved match count;
<add>                    // treat it as zero rather than aborting the sample.
<add>                    log.warn("Could not parse "+prevString, e1);
<add> }
<add> }
<ide> vars.put(refName+"_matchNr", ""+matches.size());// Save the count
<ide> for (int i=1;i<=matches.size();i++) {
<ide> match = getCorrectMatch(matches, i);
<ide> vars.put(refName+"_"+i, generateResult(match));
<ide> saveGroups(vars, refName+"_"+i, match);
<ide> }
<add> }
<add> for (int i = matches.size()+1;i<=prevCount;i++)
<add> {
<add> vars.remove(refName+"_"+i);
<add> vars.remove(refName+"_"+i+"_g0");// Remove known groups ...
<add> vars.remove(refName+"_"+i+"_g1");// ...
<add> //TODO remove other groups if present?
<ide> }
<ide> }
<ide> }
<ide> {
<ide> extractor.setRegex(
<ide> "<value field=\"(pinposition\\d+)\">(\\d+)</value>");
<del> extractor.setTemplate("_$1$");
<add> extractor.setTemplate("$1$");
<ide> extractor.setMatchNumber(-1);
<ide> extractor.process();
<ide> assertEquals("3",vars.get("regVal_matchNr"));
<del> assertEquals("_pinposition1", vars.get("regVal_1"));
<del> assertEquals("_pinposition2", vars.get("regVal_2"));
<del> assertEquals("_pinposition3", vars.get("regVal_3"));
<add> assertEquals("pinposition1", vars.get("regVal_1"));
<add> assertEquals("pinposition2", vars.get("regVal_2"));
<add> assertEquals("pinposition3", vars.get("regVal_3"));
<ide> assertEquals("pinposition1", vars.get("regVal_1_g1"));
<ide> assertEquals("1", vars.get("regVal_1_g2"));
<ide> assertEquals("<value field=\"pinposition1\">1</value>", vars.get("regVal_1_g0"));
<add> assertNull(vars.get("regVal_4"));
<add>
<add> // Check old values don't hang around:
<add> extractor.setRegex("(\\w+)count"); // fewer matches
<add> extractor.process();
<add> assertEquals("2",vars.get("regVal_matchNr"));
<add> assertEquals("position", vars.get("regVal_1"));
<add> assertEquals("invalidpin", vars.get("regVal_2"));
<add> assertNull("Unused variables should be null",vars.get("regVal_3"));
<add> assertNull("Unused variables should be null",vars.get("regVal_3_g0"));
<add> assertNull("Unused variables should be null",vars.get("regVal_3_g1"));
<ide> }
<ide> }
<ide> } |
|
Java | apache-2.0 | e3c96b10071a4f688e638fb4206c8d6a791de26a | 0 | ibmsoe/hbase,ibmsoe/hbase,ibmsoe/hbase,ibmsoe/hbase,ibmsoe/hbase,ibmsoe/hbase,ibmsoe/hbase,ibmsoe/hbase,ibmsoe/hbase,ibmsoe/hbase | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import java.io.EOFException;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Constructor;
import java.text.ParseException;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.RandomAccess;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.FutureTask;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompoundConfiguration;
import org.apache.hadoop.hbase.DroppedSnapshotException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.RegionTooBusyException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.hadoop.hbase.backup.HFileArchiver;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.IsolationLevel;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.conf.ConfigurationManager;
import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
import org.apache.hadoop.hbase.exceptions.RegionInRecoveryException;
import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FilterWrapper;
import org.apache.hadoop.hbase.filter.IncompatibleFilterException;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;
import org.apache.hadoop.hbase.ipc.RpcCallContext;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall;
import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionLoad;
import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor;
import org.apache.hadoop.hbase.regionserver.MultiVersionConsistencyControl.WriteEntry;
import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;
import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionThroughputController;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionThroughputControllerFactory;
import org.apache.hadoop.hbase.regionserver.compactions.NoLimitCompactionThroughputController;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.regionserver.wal.MetricsWAL;
import org.apache.hadoop.hbase.regionserver.wal.ReplayHLogKey;
import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.regionserver.wal.WALUtil;
import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.SnapshotManifest;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CancelableProgressable;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.CompressionTest;
import org.apache.hadoop.hbase.util.Counter;
import org.apache.hadoop.hbase.util.EncryptionTest;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HashedBytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.wal.WALSplitter;
import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;
import org.apache.hadoop.io.MultipleIOException;
import org.apache.hadoop.util.StringUtils;
import org.apache.htrace.Trace;
import org.apache.htrace.TraceScope;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.io.Closeables;
import com.google.protobuf.ByteString;
import com.google.protobuf.Descriptors;
import com.google.protobuf.Message;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
import com.google.protobuf.Service;
import com.google.protobuf.TextFormat;
@InterfaceAudience.Private
public class HRegion implements HeapSize, PropagatingConfigurationObserver, Region {
public static final Log LOG = LogFactory.getLog(HRegion.class);
public static final String LOAD_CFS_ON_DEMAND_CONFIG_KEY =
"hbase.hregion.scan.loadColumnFamiliesOnDemand";
/**
* This is the global default value for durability. All tables/mutations not
* defining a durability or using USE_DEFAULT will default to this value.
*/
private static final Durability DEFAULT_DURABILITY = Durability.SYNC_WAL;
final AtomicBoolean closed = new AtomicBoolean(false);
/* Closing can take some time; use the closing flag if there is stuff we don't
* want to do while in closing state; e.g. like offer this region up to the
* master as a region to close if the carrying regionserver is overloaded.
* Once set, it is never cleared.
*/
final AtomicBoolean closing = new AtomicBoolean(false);
/**
   * The max sequence id of flushed data on this region. Used for making rough calculations on
   * whether it is time to flush or not.
*/
private volatile long maxFlushedSeqId = HConstants.NO_SEQNUM;
/**
* Record the sequence id of last flush operation.
*/
private volatile long lastFlushOpSeqId = HConstants.NO_SEQNUM;
/**
* Region scoped edit sequence Id. Edits to this region are GUARANTEED to appear in the WAL
* file in this sequence id's order; i.e. edit #2 will be in the WAL after edit #1.
* Its default value is -1L. This default is used as a marker to indicate
* that the region hasn't opened yet. Once it is opened, it is set to the derived
* {@link #openSeqNum}, the largest sequence id of all hfiles opened under this Region.
*
* <p>Control of this sequence is handed off to the WAL implementation. It is responsible
* for tagging edits with the correct sequence id since it is responsible for getting the
* edits into the WAL files. It controls updating the sequence id value. DO NOT UPDATE IT
* OUTSIDE OF THE WAL. The value you get will not be what you think it is.
*/
private final AtomicLong sequenceId = new AtomicLong(-1L);
/**
* The sequence id of the last replayed open region event from the primary region. This is used
* to skip entries before this due to the possibility of replay edits coming out of order from
* replication.
*/
protected volatile long lastReplayedOpenRegionSeqId = -1L;
protected volatile long lastReplayedCompactionSeqId = -1L;
//////////////////////////////////////////////////////////////////////////////
// Members
//////////////////////////////////////////////////////////////////////////////
// map from a locked row to the context for that lock including:
// - CountDownLatch for threads waiting on that row
// - the thread that owns the lock (allow reentrancy)
// - reference count of (reentrant) locks held by the thread
// - the row itself
private final ConcurrentHashMap<HashedBytes, RowLockContext> lockedRows =
new ConcurrentHashMap<HashedBytes, RowLockContext>();
protected final Map<byte[], Store> stores = new ConcurrentSkipListMap<byte[], Store>(
Bytes.BYTES_RAWCOMPARATOR);
// TODO: account for each registered handler in HeapSize computation
private Map<String, Service> coprocessorServiceHandlers = Maps.newHashMap();
public final AtomicLong memstoreSize = new AtomicLong(0);
// Debug possible data loss due to WAL off
final Counter numMutationsWithoutWAL = new Counter();
final Counter dataInMemoryWithoutWAL = new Counter();
// Debug why CAS operations are taking a while.
final Counter checkAndMutateChecksPassed = new Counter();
final Counter checkAndMutateChecksFailed = new Counter();
//Number of requests
final Counter readRequestsCount = new Counter();
final Counter writeRequestsCount = new Counter();
// Number of requests blocked by memstore size.
private final Counter blockedRequestsCount = new Counter();
// Compaction counters
final AtomicLong compactionsFinished = new AtomicLong(0L);
final AtomicLong compactionNumFilesCompacted = new AtomicLong(0L);
final AtomicLong compactionNumBytesCompacted = new AtomicLong(0L);
private final WAL wal;
private final HRegionFileSystem fs;
protected final Configuration conf;
private final Configuration baseConf;
private final KeyValue.KVComparator comparator;
private final int rowLockWaitDuration;
static final int DEFAULT_ROWLOCK_WAIT_DURATION = 30000;
// The internal wait duration to acquire a lock before read/update
// from the region. It is not per row. The purpose of this wait time
// is to avoid waiting a long time while the region is busy, so that
// we can release the IPC handler soon enough to improve the
// availability of the region server. It can be adjusted by
// tuning configuration "hbase.busy.wait.duration".
final long busyWaitDuration;
static final long DEFAULT_BUSY_WAIT_DURATION = HConstants.DEFAULT_HBASE_RPC_TIMEOUT;
// If updating multiple rows in one call, wait longer,
// i.e. waiting for busyWaitDuration * # of rows. However,
// we can limit the max multiplier.
final int maxBusyWaitMultiplier;
// Max busy wait duration. There is no point to wait longer than the RPC
// purge timeout, when a RPC call will be terminated by the RPC engine.
final long maxBusyWaitDuration;
// negative number indicates infinite timeout
static final long DEFAULT_ROW_PROCESSOR_TIMEOUT = 60 * 1000L;
final ExecutorService rowProcessorExecutor = Executors.newCachedThreadPool();
private final ConcurrentHashMap<RegionScanner, Long> scannerReadPoints;
/**
* The sequence ID that was encountered when this region was opened.
*/
private long openSeqNum = HConstants.NO_SEQNUM;
/**
* The default setting for whether to enable on-demand CF loading for
* scan requests to this region. Requests can override it.
*/
private boolean isLoadingCfsOnDemandDefault = false;
private final AtomicInteger majorInProgress = new AtomicInteger(0);
private final AtomicInteger minorInProgress = new AtomicInteger(0);
//
// Context: During replay we want to ensure that we do not lose any data. So, we
// have to be conservative in how we replay wals. For each store, we calculate
// the maxSeqId up to which the store was flushed. And, skip the edits which
// are equal to or lower than maxSeqId for each store.
// The following map is populated when opening the region
Map<byte[], Long> maxSeqIdInStores = new TreeMap<byte[], Long>(Bytes.BYTES_COMPARATOR);
/** Saved state from replaying prepare flush cache */
private PrepareFlushResult prepareFlushResult = null;
/**
* Config setting for whether to allow writes when a region is in recovering or not.
*/
private boolean disallowWritesInRecovering = false;
// when a region is in recovering state, it can only accept writes not reads
private volatile boolean isRecovering = false;
private volatile Optional<ConfigurationManager> configurationManager;
/**
* @return The smallest mvcc readPoint across all the scanners in this
   * region. Writes older than this readPoint are included in every
* read operation.
*/
public long getSmallestReadPoint() {
long minimumReadPoint;
// We need to ensure that while we are calculating the smallestReadPoint
// no new RegionScanners can grab a readPoint that we are unaware of.
// We achieve this by synchronizing on the scannerReadPoints object.
synchronized(scannerReadPoints) {
minimumReadPoint = mvcc.memstoreReadPoint();
for (Long readPoint: this.scannerReadPoints.values()) {
if (readPoint < minimumReadPoint) {
minimumReadPoint = readPoint;
}
}
}
return minimumReadPoint;
}
/*
   * Data structure of write state flags used for coordinating flushes,
* compactions and closes.
*/
static class WriteState {
// Set while a memstore flush is happening.
volatile boolean flushing = false;
// Set when a flush has been requested.
volatile boolean flushRequested = false;
// Number of compactions running.
volatile int compacting = 0;
// Gets set in close. If set, cannot compact or flush again.
volatile boolean writesEnabled = true;
// Set if region is read-only
volatile boolean readOnly = false;
// whether the reads are enabled. This is different than readOnly, because readOnly is
// static in the lifetime of the region, while readsEnabled is dynamic
volatile boolean readsEnabled = true;
/**
* Set flags that make this region read-only.
*
* @param onOff flip value for region r/o setting
*/
synchronized void setReadOnly(final boolean onOff) {
this.writesEnabled = !onOff;
this.readOnly = onOff;
}
boolean isReadOnly() {
return this.readOnly;
}
boolean isFlushRequested() {
return this.flushRequested;
}
void setReadsEnabled(boolean readsEnabled) {
this.readsEnabled = readsEnabled;
}
static final long HEAP_SIZE = ClassSize.align(
ClassSize.OBJECT + 5 * Bytes.SIZEOF_BOOLEAN);
}
/**
   * Objects from this class are created when flushing to describe all the different states that
   * the flush can end up in. The Result enum describes those states. The sequence id should only
* be specified if the flush was successful, and the failure message should only be specified
* if it didn't flush.
*/
public static class FlushResultImpl implements FlushResult {
final Result result;
final String failureReason;
final long flushSequenceId;
final boolean wroteFlushWalMarker;
/**
     * Convenience constructor to use when the flush is successful; the failure message is set to
* null.
* @param result Expecting FLUSHED_NO_COMPACTION_NEEDED or FLUSHED_COMPACTION_NEEDED.
* @param flushSequenceId Generated sequence id that comes right after the edits in the
* memstores.
*/
FlushResultImpl(Result result, long flushSequenceId) {
this(result, flushSequenceId, null, false);
assert result == Result.FLUSHED_NO_COMPACTION_NEEDED || result == Result
.FLUSHED_COMPACTION_NEEDED;
}
/**
* Convenience constructor to use when we cannot flush.
* @param result Expecting CANNOT_FLUSH_MEMSTORE_EMPTY or CANNOT_FLUSH.
* @param failureReason Reason why we couldn't flush.
*/
FlushResultImpl(Result result, String failureReason, boolean wroteFlushMarker) {
this(result, -1, failureReason, wroteFlushMarker);
assert result == Result.CANNOT_FLUSH_MEMSTORE_EMPTY || result == Result.CANNOT_FLUSH;
}
/**
* Constructor with all the parameters.
     * @param result Any of the {@link Result} values.
* @param flushSequenceId Generated sequence id if the memstores were flushed else -1.
* @param failureReason Reason why we couldn't flush, or null.
*/
FlushResultImpl(Result result, long flushSequenceId, String failureReason,
boolean wroteFlushMarker) {
this.result = result;
this.flushSequenceId = flushSequenceId;
this.failureReason = failureReason;
this.wroteFlushWalMarker = wroteFlushMarker;
}
/**
* Convenience method, the equivalent of checking if result is
     * FLUSHED_NO_COMPACTION_NEEDED or FLUSHED_COMPACTION_NEEDED.
* @return true if the memstores were flushed, else false.
*/
public boolean isFlushSucceeded() {
return result == Result.FLUSHED_NO_COMPACTION_NEEDED || result == Result
.FLUSHED_COMPACTION_NEEDED;
}
/**
* Convenience method, the equivalent of checking if result is FLUSHED_COMPACTION_NEEDED.
     * @return True if the flush requested a compaction, else false (a false return does not
     * indicate whether the flush itself succeeded).
*/
public boolean isCompactionNeeded() {
return result == Result.FLUSHED_COMPACTION_NEEDED;
}
@Override
public String toString() {
return new StringBuilder()
.append("flush result:").append(result).append(", ")
.append("failureReason:").append(failureReason).append(",")
.append("flush seq id").append(flushSequenceId).toString();
}
@Override
public Result getResult() {
return result;
}
}
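  // Sketch of how a caller might consume a FlushResult (illustrative only; the
  // flush(boolean) call below assumes the Region interface's flush method and
  // is not taken from this file):
  //
  //   FlushResult fr = region.flush(true);
  //   if (fr.isFlushSucceeded() && fr.isCompactionNeeded()) {
  //     // hand the region to the compaction chore
  //   }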
/** A result object from prepare flush cache stage */
@VisibleForTesting
static class PrepareFlushResult {
final FlushResult result; // indicating a failure result from prepare
final TreeMap<byte[], StoreFlushContext> storeFlushCtxs;
final TreeMap<byte[], List<Path>> committedFiles;
final long startTime;
final long flushOpSeqId;
final long flushedSeqId;
final long totalFlushableSize;
/** Constructs an early exit case */
PrepareFlushResult(FlushResult result, long flushSeqId) {
this(result, null, null, Math.max(0, flushSeqId), 0, 0, 0);
}
/** Constructs a successful prepare flush result */
PrepareFlushResult(
TreeMap<byte[], StoreFlushContext> storeFlushCtxs,
TreeMap<byte[], List<Path>> committedFiles, long startTime, long flushSeqId,
long flushedSeqId, long totalFlushableSize) {
this(null, storeFlushCtxs, committedFiles, startTime,
flushSeqId, flushedSeqId, totalFlushableSize);
}
private PrepareFlushResult(
FlushResult result,
TreeMap<byte[], StoreFlushContext> storeFlushCtxs,
TreeMap<byte[], List<Path>> committedFiles, long startTime, long flushSeqId,
long flushedSeqId, long totalFlushableSize) {
this.result = result;
this.storeFlushCtxs = storeFlushCtxs;
this.committedFiles = committedFiles;
this.startTime = startTime;
this.flushOpSeqId = flushSeqId;
this.flushedSeqId = flushedSeqId;
this.totalFlushableSize = totalFlushableSize;
}
public FlushResult getResult() {
return this.result;
}
}
final WriteState writestate = new WriteState();
long memstoreFlushSize;
final long timestampSlop;
final long rowProcessorTimeout;
  // Last flush time for each Store. Useful when we are flushing per column family.
private final ConcurrentMap<Store, Long> lastStoreFlushTimeMap =
new ConcurrentHashMap<Store, Long>();
final RegionServerServices rsServices;
private RegionServerAccounting rsAccounting;
private long flushCheckInterval;
  // flushPerChanges forces a flush before too many changes accumulate in the memstore
private long flushPerChanges;
private long blockingMemStoreSize;
final long threadWakeFrequency;
// Used to guard closes
final ReentrantReadWriteLock lock =
new ReentrantReadWriteLock();
// Stop updates lock
private final ReentrantReadWriteLock updatesLock =
new ReentrantReadWriteLock();
private boolean splitRequest;
private byte[] explicitSplitPoint = null;
private final MultiVersionConsistencyControl mvcc =
new MultiVersionConsistencyControl();
// Coprocessor host
private RegionCoprocessorHost coprocessorHost;
private HTableDescriptor htableDescriptor = null;
private RegionSplitPolicy splitPolicy;
private FlushPolicy flushPolicy;
private final MetricsRegion metricsRegion;
private final MetricsRegionWrapperImpl metricsRegionWrapper;
private final Durability durability;
private final boolean regionStatsEnabled;
/**
* HRegion constructor. This constructor should only be used for testing and
* extensions. Instances of HRegion should be instantiated with the
* {@link HRegion#createHRegion} or {@link HRegion#openHRegion} method.
*
* @param tableDir qualified path of directory where region should be located,
* usually the table directory.
* @param wal The WAL is the outbound log for any updates to the HRegion
* The wal file is a logfile from the previous execution that's
* custom-computed for this HRegion. The HRegionServer computes and sorts the
* appropriate wal info for this HRegion. If there is a previous wal file
* (implying that the HRegion has been written-to before), then read it from
* the supplied path.
* @param fs is the filesystem.
* @param confParam is global configuration settings.
   * @param regionInfo - HRegionInfo that describes the region
* @param htd the table descriptor
* @param rsServices reference to {@link RegionServerServices} or null
*/
@Deprecated
public HRegion(final Path tableDir, final WAL wal, final FileSystem fs,
final Configuration confParam, final HRegionInfo regionInfo,
final HTableDescriptor htd, final RegionServerServices rsServices) {
this(new HRegionFileSystem(confParam, fs, tableDir, regionInfo),
wal, confParam, htd, rsServices);
}
/**
* HRegion constructor. This constructor should only be used for testing and
* extensions. Instances of HRegion should be instantiated with the
* {@link HRegion#createHRegion} or {@link HRegion#openHRegion} method.
*
* @param fs is the filesystem.
* @param wal The WAL is the outbound log for any updates to the HRegion
* The wal file is a logfile from the previous execution that's
* custom-computed for this HRegion. The HRegionServer computes and sorts the
* appropriate wal info for this HRegion. If there is a previous wal file
* (implying that the HRegion has been written-to before), then read it from
* the supplied path.
* @param confParam is global configuration settings.
* @param htd the table descriptor
* @param rsServices reference to {@link RegionServerServices} or null
*/
public HRegion(final HRegionFileSystem fs, final WAL wal, final Configuration confParam,
final HTableDescriptor htd, final RegionServerServices rsServices) {
if (htd == null) {
throw new IllegalArgumentException("Need table descriptor");
}
if (confParam instanceof CompoundConfiguration) {
throw new IllegalArgumentException("Need original base configuration");
}
this.comparator = fs.getRegionInfo().getComparator();
this.wal = wal;
this.fs = fs;
// 'conf' renamed to 'confParam' b/c we use this.conf in the constructor
this.baseConf = confParam;
this.conf = new CompoundConfiguration()
.add(confParam)
.addStringMap(htd.getConfiguration())
.addWritableMap(htd.getValues());
this.flushCheckInterval = conf.getInt(MEMSTORE_PERIODIC_FLUSH_INTERVAL,
DEFAULT_CACHE_FLUSH_INTERVAL);
this.flushPerChanges = conf.getLong(MEMSTORE_FLUSH_PER_CHANGES, DEFAULT_FLUSH_PER_CHANGES);
if (this.flushPerChanges > MAX_FLUSH_PER_CHANGES) {
throw new IllegalArgumentException(MEMSTORE_FLUSH_PER_CHANGES + " can not exceed "
+ MAX_FLUSH_PER_CHANGES);
}
this.rowLockWaitDuration = conf.getInt("hbase.rowlock.wait.duration",
DEFAULT_ROWLOCK_WAIT_DURATION);
this.isLoadingCfsOnDemandDefault = conf.getBoolean(LOAD_CFS_ON_DEMAND_CONFIG_KEY, true);
this.htableDescriptor = htd;
this.rsServices = rsServices;
this.threadWakeFrequency = conf.getLong(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000);
setHTableSpecificConf();
this.scannerReadPoints = new ConcurrentHashMap<RegionScanner, Long>();
this.busyWaitDuration = conf.getLong(
"hbase.busy.wait.duration", DEFAULT_BUSY_WAIT_DURATION);
this.maxBusyWaitMultiplier = conf.getInt("hbase.busy.wait.multiplier.max", 2);
if (busyWaitDuration * maxBusyWaitMultiplier <= 0L) {
throw new IllegalArgumentException("Invalid hbase.busy.wait.duration ("
+ busyWaitDuration + ") or hbase.busy.wait.multiplier.max ("
+ maxBusyWaitMultiplier + "). Their product should be positive");
}
this.maxBusyWaitDuration = conf.getLong("hbase.ipc.client.call.purge.timeout",
2 * HConstants.DEFAULT_HBASE_RPC_TIMEOUT);
/*
* timestamp.slop provides a server-side constraint on the timestamp. This
* assumes that you base your TS around currentTimeMillis(). In this case,
* throw an error to the user if the user-specified TS is newer than now +
     * slop. A value of LATEST_TIMESTAMP disables this check.
*/
this.timestampSlop = conf.getLong(
"hbase.hregion.keyvalue.timestamp.slop.millisecs",
HConstants.LATEST_TIMESTAMP);
/**
* Timeout for the process time in processRowsWithLocks().
* Use -1 to switch off time bound.
*/
this.rowProcessorTimeout = conf.getLong(
"hbase.hregion.row.processor.timeout", DEFAULT_ROW_PROCESSOR_TIMEOUT);
this.durability = htd.getDurability() == Durability.USE_DEFAULT
? DEFAULT_DURABILITY
: htd.getDurability();
if (rsServices != null) {
this.rsAccounting = this.rsServices.getRegionServerAccounting();
// don't initialize coprocessors if not running within a regionserver
// TODO: revisit if coprocessors should load in other cases
this.coprocessorHost = new RegionCoprocessorHost(this, rsServices, conf);
this.metricsRegionWrapper = new MetricsRegionWrapperImpl(this);
this.metricsRegion = new MetricsRegion(this.metricsRegionWrapper);
Map<String, Region> recoveringRegions = rsServices.getRecoveringRegions();
String encodedName = getRegionInfo().getEncodedName();
if (recoveringRegions != null && recoveringRegions.containsKey(encodedName)) {
this.isRecovering = true;
recoveringRegions.put(encodedName, this);
}
} else {
this.metricsRegionWrapper = null;
this.metricsRegion = null;
}
if (LOG.isDebugEnabled()) {
// Write out region name as string and its encoded name.
LOG.debug("Instantiated " + this);
}
// by default, we allow writes against a region when it's in recovering
this.disallowWritesInRecovering =
conf.getBoolean(HConstants.DISALLOW_WRITES_IN_RECOVERING,
HConstants.DEFAULT_DISALLOW_WRITES_IN_RECOVERING_CONFIG);
configurationManager = Optional.absent();
    // disable stats tracking for system tables, but check the config for everything else
this.regionStatsEnabled = htd.getTableName().getNamespaceAsString().equals(
NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR) ?
false :
conf.getBoolean(HConstants.ENABLE_CLIENT_BACKPRESSURE,
HConstants.DEFAULT_ENABLE_CLIENT_BACKPRESSURE);
}
void setHTableSpecificConf() {
if (this.htableDescriptor == null) return;
long flushSize = this.htableDescriptor.getMemStoreFlushSize();
if (flushSize <= 0) {
flushSize = conf.getLong(HConstants.HREGION_MEMSTORE_FLUSH_SIZE,
HTableDescriptor.DEFAULT_MEMSTORE_FLUSH_SIZE);
}
this.memstoreFlushSize = flushSize;
this.blockingMemStoreSize = this.memstoreFlushSize *
conf.getLong("hbase.hregion.memstore.block.multiplier", 2);
}
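  // Worked example of the sizing above (hypothetical values): with a memstore
  // flush size of 128 MB and hbase.hregion.memstore.block.multiplier = 2,
  // updates are blocked once the memstore reaches 256 MB, until a flush
  // brings it back under the limit.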
/**
* Initialize this region.
* Used only by tests and SplitTransaction to reopen the region.
* You should use createHRegion() or openHRegion()
* @return What the next sequence (edit) id should be.
* @throws IOException e
* @deprecated use HRegion.createHRegion() or HRegion.openHRegion()
*/
@Deprecated
public long initialize() throws IOException {
return initialize(null);
}
/**
* Initialize this region.
*
* @param reporter Tickle every so often if initialize is taking a while.
* @return What the next sequence (edit) id should be.
* @throws IOException e
*/
private long initialize(final CancelableProgressable reporter) throws IOException {
MonitoredTask status = TaskMonitor.get().createStatus("Initializing region " + this);
long nextSeqId = -1;
try {
nextSeqId = initializeRegionInternals(reporter, status);
return nextSeqId;
} finally {
// nextSeqid will be -1 if the initialization fails.
// At least it will be 0 otherwise.
if (nextSeqId == -1) {
status.abort("Exception during region " + getRegionInfo().getRegionNameAsString() +
" initialization.");
}
}
}
private long initializeRegionInternals(final CancelableProgressable reporter,
final MonitoredTask status) throws IOException {
if (coprocessorHost != null) {
status.setStatus("Running coprocessor pre-open hook");
coprocessorHost.preOpen();
}
// Write HRI to a file in case we need to recover hbase:meta
status.setStatus("Writing region info on filesystem");
fs.checkRegionInfoOnFilesystem();
// Initialize all the HStores
status.setStatus("Initializing all the Stores");
long maxSeqId = initializeRegionStores(reporter, status, false);
this.lastReplayedOpenRegionSeqId = maxSeqId;
this.writestate.setReadOnly(ServerRegionReplicaUtil.isReadOnly(this));
this.writestate.flushRequested = false;
this.writestate.compacting = 0;
if (this.writestate.writesEnabled) {
// Remove temporary data left over from old regions
status.setStatus("Cleaning up temporary data from old regions");
fs.cleanupTempDir();
}
if (this.writestate.writesEnabled) {
status.setStatus("Cleaning up detritus from prior splits");
// Get rid of any splits or merges that were lost in-progress. Clean out
// these directories here on open. We may be opening a region that was
// being split but we crashed in the middle of it all.
fs.cleanupAnySplitDetritus();
fs.cleanupMergesDir();
}
// Initialize split policy
this.splitPolicy = RegionSplitPolicy.create(this, conf);
// Initialize flush policy
this.flushPolicy = FlushPolicyFactory.create(this, conf);
long lastFlushTime = EnvironmentEdgeManager.currentTime();
for (Store store: stores.values()) {
this.lastStoreFlushTimeMap.put(store, lastFlushTime);
}
// Use maximum of log sequenceid or that which was found in stores
// (particularly if no recovered edits, seqid will be -1).
long nextSeqid = maxSeqId;
    // In distributedLogReplay mode, we don't know the last change sequence number because the
    // region is opened before recovery completes. So we add a safety bumper so that new
    // sequence numbers do not overlap sequence numbers that are already in use.
if (this.writestate.writesEnabled) {
nextSeqid = WALSplitter.writeRegionSequenceIdFile(this.fs.getFileSystem(), this.fs
.getRegionDir(), nextSeqid, (this.isRecovering ? (this.flushPerChanges + 10000000) : 1));
} else {
nextSeqid++;
}
LOG.info("Onlined " + this.getRegionInfo().getShortNameToLog() +
"; next sequenceid=" + nextSeqid);
    // A region can be reopened if it failed a split; reset flags
this.closing.set(false);
this.closed.set(false);
if (coprocessorHost != null) {
status.setStatus("Running coprocessor post-open hooks");
coprocessorHost.postOpen();
}
status.markComplete("Region opened successfully");
return nextSeqid;
}
private long initializeRegionStores(final CancelableProgressable reporter, MonitoredTask status,
boolean warmupOnly)
throws IOException {
// Load in all the HStores.
long maxSeqId = -1;
// initialized to -1 so that we pick up MemstoreTS from column families
long maxMemstoreTS = -1;
if (!htableDescriptor.getFamilies().isEmpty()) {
// initialize the thread pool for opening stores in parallel.
ThreadPoolExecutor storeOpenerThreadPool =
getStoreOpenAndCloseThreadPool("StoreOpener-" + this.getRegionInfo().getShortNameToLog());
CompletionService<HStore> completionService =
new ExecutorCompletionService<HStore>(storeOpenerThreadPool);
// initialize each store in parallel
for (final HColumnDescriptor family : htableDescriptor.getFamilies()) {
status.setStatus("Instantiating store for column family " + family);
completionService.submit(new Callable<HStore>() {
@Override
public HStore call() throws IOException {
return instantiateHStore(family);
}
});
}
boolean allStoresOpened = false;
try {
for (int i = 0; i < htableDescriptor.getFamilies().size(); i++) {
Future<HStore> future = completionService.take();
HStore store = future.get();
this.stores.put(store.getFamily().getName(), store);
long storeMaxSequenceId = store.getMaxSequenceId();
maxSeqIdInStores.put(store.getColumnFamilyName().getBytes(),
storeMaxSequenceId);
if (maxSeqId == -1 || storeMaxSequenceId > maxSeqId) {
maxSeqId = storeMaxSequenceId;
}
long maxStoreMemstoreTS = store.getMaxMemstoreTS();
if (maxStoreMemstoreTS > maxMemstoreTS) {
maxMemstoreTS = maxStoreMemstoreTS;
}
}
allStoresOpened = true;
} catch (InterruptedException e) {
throw (InterruptedIOException)new InterruptedIOException().initCause(e);
} catch (ExecutionException e) {
throw new IOException(e.getCause());
} finally {
storeOpenerThreadPool.shutdownNow();
if (!allStoresOpened) {
// something went wrong, close all opened stores
LOG.error("Could not initialize all stores for the region=" + this);
for (Store store : this.stores.values()) {
try {
store.close();
} catch (IOException e) {
LOG.warn(e.getMessage());
}
}
}
}
}
if (ServerRegionReplicaUtil.shouldReplayRecoveredEdits(this) && !warmupOnly) {
// Recover any edits if available.
maxSeqId = Math.max(maxSeqId, replayRecoveredEditsIfAny(
this.fs.getRegionDir(), maxSeqIdInStores, reporter, status));
}
maxSeqId = Math.max(maxSeqId, maxMemstoreTS + 1);
mvcc.initialize(maxSeqId);
return maxSeqId;
}
private void initializeWarmup(final CancelableProgressable reporter) throws IOException {
MonitoredTask status = TaskMonitor.get().createStatus("Initializing region " + this);
// Initialize all the HStores
status.setStatus("Warming up all the Stores");
initializeRegionStores(reporter, status, true);
}
private void writeRegionOpenMarker(WAL wal, long openSeqId) throws IOException {
Map<byte[], List<Path>> storeFiles = new TreeMap<byte[], List<Path>>(Bytes.BYTES_COMPARATOR);
for (Store store: getStores()) {
ArrayList<Path> storeFileNames = new ArrayList<Path>();
for (StoreFile storeFile: store.getStorefiles()) {
storeFileNames.add(storeFile.getPath());
}
storeFiles.put(store.getFamily().getName(), storeFileNames);
}
RegionEventDescriptor regionOpenDesc = ProtobufUtil.toRegionEventDescriptor(
RegionEventDescriptor.EventType.REGION_OPEN, getRegionInfo(), openSeqId,
getRegionServerServices().getServerName(), storeFiles);
WALUtil.writeRegionEventMarker(wal, getTableDesc(), getRegionInfo(), regionOpenDesc,
getSequenceId());
}
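/**
 * Writes a REGION_CLOSE event marker to the WAL and persists the next sequence id under
 * the region directory so a later open can resume from it.
 */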
private void writeRegionCloseMarker(WAL wal) throws IOException {
Map<byte[], List<Path>> storeFiles = new TreeMap<byte[], List<Path>>(Bytes.BYTES_COMPARATOR);
for (Store store: getStores()) {
ArrayList<Path> storeFileNames = new ArrayList<Path>();
for (StoreFile storeFile: store.getStorefiles()) {
storeFileNames.add(storeFile.getPath());
}
storeFiles.put(store.getFamily().getName(), storeFileNames);
}
RegionEventDescriptor regionEventDesc = ProtobufUtil.toRegionEventDescriptor(
RegionEventDescriptor.EventType.REGION_CLOSE, getRegionInfo(), getSequenceId().get(),
getRegionServerServices().getServerName(), storeFiles);
WALUtil.writeRegionEventMarker(wal, getTableDesc(), getRegionInfo(), regionEventDesc,
getSequenceId());
// Store SeqId in HDFS when a region closes
// we check that the region folder exists because many tests delete the table folder
// while the table is still online
if (this.fs.getFileSystem().exists(this.fs.getRegionDir())) {
WALSplitter.writeRegionSequenceIdFile(this.fs.getFileSystem(), this.fs.getRegionDir(),
getSequenceId().get(), 0);
}
}
/**
* @return True if this region has references.
*/
public boolean hasReferences() {
for (Store store : this.stores.values()) {
if (store.hasReferences()) return true;
}
return false;
}
@Override
public HDFSBlocksDistribution getHDFSBlocksDistribution() {
HDFSBlocksDistribution hdfsBlocksDistribution =
new HDFSBlocksDistribution();
synchronized (this.stores) {
for (Store store : this.stores.values()) {
for (StoreFile sf : store.getStorefiles()) {
HDFSBlocksDistribution storeFileBlocksDistribution =
sf.getHDFSBlockDistribution();
hdfsBlocksDistribution.add(storeFileBlocksDistribution);
}
}
}
return hdfsBlocksDistribution;
}
/**
* This is a helper function to compute HDFS block distribution on demand
* @param conf configuration
* @param tableDescriptor HTableDescriptor of the table
* @param regionInfo the region info
* @return The HDFS blocks distribution for the given region.
* @throws IOException
*/
public static HDFSBlocksDistribution computeHDFSBlocksDistribution(final Configuration conf,
final HTableDescriptor tableDescriptor, final HRegionInfo regionInfo) throws IOException {
Path tablePath = FSUtils.getTableDir(FSUtils.getRootDir(conf), tableDescriptor.getTableName());
return computeHDFSBlocksDistribution(conf, tableDescriptor, regionInfo, tablePath);
}
/**
* This is a helper function to compute HDFS block distribution on demand
* @param conf configuration
* @param tableDescriptor HTableDescriptor of the table
* @param regionInfo the region info
* @param tablePath the table directory
* @return The HDFS blocks distribution for the given region.
* @throws IOException
*/
public static HDFSBlocksDistribution computeHDFSBlocksDistribution(final Configuration conf,
final HTableDescriptor tableDescriptor, final HRegionInfo regionInfo, Path tablePath)
throws IOException {
HDFSBlocksDistribution hdfsBlocksDistribution = new HDFSBlocksDistribution();
FileSystem fs = tablePath.getFileSystem(conf);
HRegionFileSystem regionFs = new HRegionFileSystem(conf, fs, tablePath, regionInfo);
for (HColumnDescriptor family: tableDescriptor.getFamilies()) {
Collection<StoreFileInfo> storeFiles = regionFs.getStoreFiles(family.getNameAsString());
if (storeFiles == null) continue;
for (StoreFileInfo storeFileInfo : storeFiles) {
hdfsBlocksDistribution.add(storeFileInfo.computeHDFSBlocksDistribution(fs));
}
}
return hdfsBlocksDistribution;
}
/**
* Increase the size of the memstore in this region and the size of the global
* memstore.
* @return the new size of the memstore in this region
*/
public long addAndGetGlobalMemstoreSize(long memStoreSize) {
if (this.rsAccounting != null) {
rsAccounting.addAndGetGlobalMemstoreSize(memStoreSize);
}
return this.memstoreSize.addAndGet(memStoreSize);
}
@Override
public HRegionInfo getRegionInfo() {
return this.fs.getRegionInfo();
}
/**
* @return Instance of {@link RegionServerServices} used by this HRegion.
* Can be null.
*/
RegionServerServices getRegionServerServices() {
return this.rsServices;
}
@Override
public long getReadRequestsCount() {
return readRequestsCount.get();
}
@Override
public void updateReadRequestsCount(long i) {
readRequestsCount.add(i);
}
@Override
public long getWriteRequestsCount() {
return writeRequestsCount.get();
}
@Override
public void updateWriteRequestsCount(long i) {
writeRequestsCount.add(i);
}
@Override
public long getMemstoreSize() {
return memstoreSize.get();
}
@Override
public long getNumMutationsWithoutWAL() {
return numMutationsWithoutWAL.get();
}
@Override
public long getDataInMemoryWithoutWAL() {
return dataInMemoryWithoutWAL.get();
}
@Override
public long getBlockedRequestsCount() {
return blockedRequestsCount.get();
}
@Override
public long getCheckAndMutateChecksPassed() {
return checkAndMutateChecksPassed.get();
}
@Override
public long getCheckAndMutateChecksFailed() {
return checkAndMutateChecksFailed.get();
}
@Override
public MetricsRegion getMetrics() {
return metricsRegion;
}
@Override
public boolean isClosed() {
return this.closed.get();
}
@Override
public boolean isClosing() {
return this.closing.get();
}
@Override
public boolean isReadOnly() {
return this.writestate.isReadOnly();
}
/**
* Reset recovering state of current region
*/
public void setRecovering(boolean newState) {
boolean wasRecovering = this.isRecovering;
// before we flip the recovering switch (enabling reads) we should write the region open
// event to WAL if needed
if (wal != null && getRegionServerServices() != null && !writestate.readOnly
&& wasRecovering && !newState) {
// force a flush only if region replication is set up for this region. Otherwise no need.
boolean forceFlush = getTableDesc().getRegionReplication() > 1;
// force a flush first
MonitoredTask status = TaskMonitor.get().createStatus(
"Flushing region " + this + " because recovery is finished");
try {
if (forceFlush) {
internalFlushcache(status);
}
status.setStatus("Writing region open event marker to WAL because recovery is finished");
try {
long seqId = openSeqNum;
// obtain a new seqId because we possibly have writes and flushes on top of openSeqNum
if (wal != null) {
seqId = getNextSequenceId(wal);
}
writeRegionOpenMarker(wal, seqId);
} catch (IOException e) {
// We cannot rethrow this exception since we are being called from the zk thread. The
// region has already opened. In this case we log the error, but continue
LOG.warn(getRegionInfo().getEncodedName() + " : was not able to write region opening "
+ "event to WAL, continueing", e);
}
} catch (IOException ioe) {
// Distributed log replay semantics do not necessarily require a flush, since the replayed
// data is already written again in the WAL. So a failed flush should be fine.
LOG.warn(getRegionInfo().getEncodedName() + " : was not able to flush "
+ "event to WAL, continuing", ioe);
} finally {
status.cleanup();
}
}
this.isRecovering = newState;
if (wasRecovering && !isRecovering) {
// Call only when wal replay is over.
coprocessorHost.postLogReplay();
}
}
@Override
public boolean isRecovering() {
return this.isRecovering;
}
@Override
public boolean isAvailable() {
return !isClosed() && !isClosing();
}
/** @return true if region is splittable */
public boolean isSplittable() {
return isAvailable() && !hasReferences();
}
/**
* @return true if region is mergeable
*/
public boolean isMergeable() {
if (!isAvailable()) {
LOG.debug("Region " + getRegionInfo().getRegionNameAsString()
+ " is not mergeable because it is closing or closed");
return false;
}
if (hasReferences()) {
LOG.debug("Region " + getRegionInfo().getRegionNameAsString()
+ " is not mergeable because it has references");
return false;
}
return true;
}
public boolean areWritesEnabled() {
synchronized(this.writestate) {
return this.writestate.writesEnabled;
}
}
public MultiVersionConsistencyControl getMVCC() {
return mvcc;
}
@Override
public long getMaxFlushedSeqId() {
return maxFlushedSeqId;
}
@Override
public long getReadpoint(IsolationLevel isolationLevel) {
if (isolationLevel == IsolationLevel.READ_UNCOMMITTED) {
// This scan can read even uncommitted transactions
return Long.MAX_VALUE;
}
return mvcc.memstoreReadPoint();
}
@Override
public boolean isLoadingCfsOnDemandDefault() {
return this.isLoadingCfsOnDemandDefault;
}
/**
* Close down this HRegion. Flush the cache, shut down each HStore, don't
* service any more calls.
*
* <p>This method could take some time to execute, so don't call it from a
* time-sensitive thread.
*
* @return Vector of all the storage files that the HRegion's component
* HStores make use of. It's a list of all HStoreFile objects. Returns an empty
* vector if already closed, and null if it is judged that it should not close.
*
* @throws IOException e
*/
public Map<byte[], List<StoreFile>> close() throws IOException {
return close(false);
}
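// Serializes concurrent callers of close(); see close(boolean).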
private final Object closeLock = new Object();
/** Conf key for the periodic flush interval */
public static final String MEMSTORE_PERIODIC_FLUSH_INTERVAL =
"hbase.regionserver.optionalcacheflushinterval";
/** Default interval for the memstore flush */
public static final int DEFAULT_CACHE_FLUSH_INTERVAL = 3600000;
public static final int META_CACHE_FLUSH_INTERVAL = 300000; // 5 minutes
/** Conf key to force a flush if there are already enough changes for one region in memstore */
public static final String MEMSTORE_FLUSH_PER_CHANGES =
"hbase.regionserver.flush.per.changes";
public static final long DEFAULT_FLUSH_PER_CHANGES = 30000000; // 30 million
/**
* The following MAX_FLUSH_PER_CHANGES is large enough because each KeyValue has 20+ bytes of
* overhead. Therefore, even 1G (one billion) empty KVs would occupy at least 20GB of memstore for a single region
*/
public static final long MAX_FLUSH_PER_CHANGES = 1000000000; // 1G
/**
* Close down this HRegion. Flush the cache unless abort parameter is true,
* Shut down each HStore, don't service any more calls.
*
* This method could take some time to execute, so don't call it from a
* time-sensitive thread.
*
* @param abort true if server is aborting (only during testing)
* @return Vector of all the storage files that the HRegion's component
* HStores make use of. It's a list of HStoreFile objects. Can be null if
* we are not to close at this time or we are already closed.
*
* @throws IOException e
*/
public Map<byte[], List<StoreFile>> close(final boolean abort) throws IOException {
// Only allow one thread to close at a time. Serialize them so dual
// threads attempting to close will run up against each other.
MonitoredTask status = TaskMonitor.get().createStatus(
"Closing region " + this +
(abort ? " due to abort" : ""));
status.setStatus("Waiting for close lock");
try {
synchronized (closeLock) {
return doClose(abort, status);
}
} finally {
status.cleanup();
}
}
private Map<byte[], List<StoreFile>> doClose(final boolean abort, MonitoredTask status)
throws IOException {
if (isClosed()) {
LOG.warn("Region " + this + " already closed");
return null;
}
if (coprocessorHost != null) {
status.setStatus("Running coprocessor pre-close hooks");
this.coprocessorHost.preClose(abort);
}
status.setStatus("Disabling compacts and flushes for region");
boolean canFlush = true;
synchronized (writestate) {
// Disable compacting and flushing by background threads for this
// region.
canFlush = !writestate.readOnly;
writestate.writesEnabled = false;
LOG.debug("Closing " + this + ": disabling compactions & flushes");
waitForFlushesAndCompactions();
}
// If we were not just flushing, is it worth doing a preflush...one
// that will clear out the bulk of the memstore before we put up
// the close flag?
if (!abort && worthPreFlushing() && canFlush) {
status.setStatus("Pre-flushing region before close");
LOG.info("Running close preflush of " + getRegionInfo().getRegionNameAsString());
try {
internalFlushcache(status);
} catch (IOException ioe) {
// Failed to flush the region. Keep going.
status.setStatus("Failed pre-flush " + this + "; " + ioe.getMessage());
}
}
this.closing.set(true);
status.setStatus("Disabling writes for close");
// block waiting for the lock for closing
lock.writeLock().lock();
try {
if (this.isClosed()) {
status.abort("Already got closed by another process");
// SplitTransaction handles the null
return null;
}
LOG.debug("Updates disabled for region " + this);
// Don't flush the cache if we are aborting
if (!abort && canFlush) {
int flushCount = 0;
while (this.memstoreSize.get() > 0) {
try {
if (flushCount++ > 0) {
int actualFlushes = flushCount - 1;
if (actualFlushes > 5) {
// If we tried 5 times and are unable to clear memory, abort
// so we do not lose data
throw new DroppedSnapshotException("Failed clearing memory after " +
actualFlushes + " attempts on region: " +
Bytes.toStringBinary(getRegionInfo().getRegionName()));
}
LOG.info("Running extra flush, " + actualFlushes +
" (carrying snapshot?) " + this);
}
internalFlushcache(status);
} catch (IOException ioe) {
status.setStatus("Failed flush " + this + ", putting online again");
synchronized (writestate) {
writestate.writesEnabled = true;
}
// Have to throw to upper layers. I can't abort server from here.
throw ioe;
}
}
}
Map<byte[], List<StoreFile>> result =
new TreeMap<byte[], List<StoreFile>>(Bytes.BYTES_COMPARATOR);
if (!stores.isEmpty()) {
// initialize the thread pool for closing stores in parallel.
ThreadPoolExecutor storeCloserThreadPool =
getStoreOpenAndCloseThreadPool("StoreCloserThread-" +
getRegionInfo().getRegionNameAsString());
CompletionService<Pair<byte[], Collection<StoreFile>>> completionService =
new ExecutorCompletionService<Pair<byte[], Collection<StoreFile>>>(storeCloserThreadPool);
// close each store in parallel
for (final Store store : stores.values()) {
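// By this point every store should have a flushable size of zero, unless we are
// aborting or the region is read-only.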
assert abort || store.getFlushableSize() == 0 || writestate.readOnly;
completionService
.submit(new Callable<Pair<byte[], Collection<StoreFile>>>() {
@Override
public Pair<byte[], Collection<StoreFile>> call() throws IOException {
return new Pair<byte[], Collection<StoreFile>>(
store.getFamily().getName(), store.close());
}
});
}
try {
for (int i = 0; i < stores.size(); i++) {
Future<Pair<byte[], Collection<StoreFile>>> future = completionService.take();
Pair<byte[], Collection<StoreFile>> storeFiles = future.get();
List<StoreFile> familyFiles = result.get(storeFiles.getFirst());
if (familyFiles == null) {
familyFiles = new ArrayList<StoreFile>();
result.put(storeFiles.getFirst(), familyFiles);
}
familyFiles.addAll(storeFiles.getSecond());
}
} catch (InterruptedException e) {
throw (InterruptedIOException)new InterruptedIOException().initCause(e);
} catch (ExecutionException e) {
throw new IOException(e.getCause());
} finally {
storeCloserThreadPool.shutdownNow();
}
}
status.setStatus("Writing region close event to WAL");
if (!abort && wal != null && getRegionServerServices() != null && !writestate.readOnly) {
writeRegionCloseMarker(wal);
}
this.closed.set(true);
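// If we never could flush (e.g. a read-only region), hand the unflushed size back to the
// global accounting; otherwise a non-zero memstore at this point indicates a problem.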
if (!canFlush) {
addAndGetGlobalMemstoreSize(-memstoreSize.get());
} else if (memstoreSize.get() != 0) {
LOG.error("Memstore size is " + memstoreSize.get());
}
if (coprocessorHost != null) {
status.setStatus("Running coprocessor post-close hooks");
this.coprocessorHost.postClose(abort);
}
if (this.metricsRegion != null) {
this.metricsRegion.close();
}
if (this.metricsRegionWrapper != null) {
Closeables.closeQuietly(this.metricsRegionWrapper);
}
status.markComplete("Closed");
LOG.info("Closed " + this);
return result;
} finally {
lock.writeLock().unlock();
}
}
@Override
public void waitForFlushesAndCompactions() {
synchronized (writestate) {
if (this.writestate.readOnly) {
// we should not wait for replayed flushes if we are read only (for example when the
// region is a secondary replica).
return;
}
boolean interrupted = false;
try {
while (writestate.compacting > 0 || writestate.flushing) {
LOG.debug("waiting for " + writestate.compacting + " compactions"
+ (writestate.flushing ? " & cache flush" : "") + " to complete for region " + this);
try {
writestate.wait();
} catch (InterruptedException iex) {
// essentially ignore and propagate the interrupt back up
LOG.warn("Interrupted while waiting");
interrupted = true;
}
}
} finally {
if (interrupted) {
Thread.currentThread().interrupt();
}
}
}
}
protected ThreadPoolExecutor getStoreOpenAndCloseThreadPool(
final String threadNamePrefix) {
int numStores = Math.max(1, this.htableDescriptor.getFamilies().size());
int maxThreads = Math.min(numStores,
conf.getInt(HConstants.HSTORE_OPEN_AND_CLOSE_THREADS_MAX,
HConstants.DEFAULT_HSTORE_OPEN_AND_CLOSE_THREADS_MAX));
return getOpenAndCloseThreadPool(maxThreads, threadNamePrefix);
}
protected ThreadPoolExecutor getStoreFileOpenAndCloseThreadPool(
final String threadNamePrefix) {
int numStores = Math.max(1, this.htableDescriptor.getFamilies().size());
int maxThreads = Math.max(1,
conf.getInt(HConstants.HSTORE_OPEN_AND_CLOSE_THREADS_MAX,
HConstants.DEFAULT_HSTORE_OPEN_AND_CLOSE_THREADS_MAX)
/ numStores);
return getOpenAndCloseThreadPool(maxThreads, threadNamePrefix);
}
static ThreadPoolExecutor getOpenAndCloseThreadPool(int maxThreads,
final String threadNamePrefix) {
return Threads.getBoundedCachedThreadPool(maxThreads, 30L, TimeUnit.SECONDS,
new ThreadFactory() {
private int count = 1;
@Override
public Thread newThread(Runnable r) {
return new Thread(r, threadNamePrefix + "-" + count++);
}
});
}
/**
* @return True if it's worth doing a flush before we put up the close flag.
*/
private boolean worthPreFlushing() {
return this.memstoreSize.get() >
this.conf.getLong("hbase.hregion.preclose.flush.size", 1024 * 1024 * 5);
}
//////////////////////////////////////////////////////////////////////////////
// HRegion accessors
//////////////////////////////////////////////////////////////////////////////
@Override
public HTableDescriptor getTableDesc() {
return this.htableDescriptor;
}
/** @return WAL in use for this region */
public WAL getWAL() {
return this.wal;
}
/**
* A split takes the config from the parent region & passes it to the daughter
* region's constructor. If 'conf' was passed, you would end up using the HTD
* of the parent region in addition to the new daughter HTD. Pass 'baseConf'
* to the daughter regions to avoid this tricky dedupe problem.
* @return Configuration object
*/
Configuration getBaseConf() {
return this.baseConf;
}
/** @return {@link FileSystem} being used by this region */
public FileSystem getFilesystem() {
return fs.getFileSystem();
}
/** @return the {@link HRegionFileSystem} used by this region */
public HRegionFileSystem getRegionFileSystem() {
return this.fs;
}
@Override
public long getEarliestFlushTimeForAllStores() {
return lastStoreFlushTimeMap.isEmpty() ? Long.MAX_VALUE : Collections.min(lastStoreFlushTimeMap
.values());
}
@Override
public long getOldestHfileTs(boolean majorCompactionOnly) throws IOException {
long result = Long.MAX_VALUE;
for (Store store : getStores()) {
for (StoreFile file : store.getStorefiles()) {
HFile.Reader reader = file.getReader().getHFileReader();
if (majorCompactionOnly) {
byte[] val = reader.loadFileInfo().get(StoreFile.MAJOR_COMPACTION_KEY);
if (val == null || !Bytes.toBoolean(val)) {
continue;
}
}
result = Math.min(result, reader.getFileContext().getFileCreateTime());
}
}
return result == Long.MAX_VALUE ? 0 : result;
}
RegionLoad.Builder setCompleteSequenceId(RegionLoad.Builder regionLoadBldr) {
long lastFlushOpSeqIdLocal = this.lastFlushOpSeqId;
byte[] encodedRegionName = this.getRegionInfo().getEncodedNameAsBytes();
regionLoadBldr.clearStoreCompleteSequenceId();
for (byte[] familyName : this.stores.keySet()) {
long oldestUnflushedSeqId = this.wal.getEarliestMemstoreSeqNum(encodedRegionName, familyName);
// no oldestUnflushedSeqId means no data has been written to the store after the last
// flush, so we use lastFlushOpSeqId as the complete sequence id for the store.
regionLoadBldr.addStoreCompleteSequenceId(StoreSequenceId
.newBuilder()
.setFamilyName(ByteString.copyFrom(familyName))
.setSequenceId(
oldestUnflushedSeqId < 0 ? lastFlushOpSeqIdLocal : oldestUnflushedSeqId - 1).build());
}
return regionLoadBldr.setCompleteSequenceId(this.maxFlushedSeqId);
}
//////////////////////////////////////////////////////////////////////////////
// HRegion maintenance.
//
// These methods are meant to be called periodically by the HRegionServer for
// upkeep.
//////////////////////////////////////////////////////////////////////////////
/** @return the size of the largest HStore. */
public long getLargestHStoreSize() {
long size = 0;
for (Store h : stores.values()) {
long storeSize = h.getSize();
if (storeSize > size) {
size = storeSize;
}
}
return size;
}
/**
* @return KeyValue Comparator
*/
public KeyValue.KVComparator getComparator() {
return this.comparator;
}
/*
* Do preparation for pending compaction.
* @throws IOException
*/
protected void doRegionCompactionPrep() throws IOException {
}
@Override
public void triggerMajorCompaction() throws IOException {
for (Store s : getStores()) {
s.triggerMajorCompaction();
}
}
@Override
public void compact(final boolean majorCompaction) throws IOException {
if (majorCompaction) {
triggerMajorCompaction();
}
for (Store s : getStores()) {
CompactionContext compaction = s.requestCompaction();
if (compaction != null) {
CompactionThroughputController controller = null;
if (rsServices != null) {
controller = CompactionThroughputControllerFactory.create(rsServices, conf);
}
if (controller == null) {
controller = NoLimitCompactionThroughputController.INSTANCE;
}
compact(compaction, s, controller);
}
}
}
/**
* This is a helper function that compacts all the stores synchronously.
* It is used by utilities and testing.
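* <p>A minimal usage sketch (assuming {@code region} is an already-opened HRegion):
* <pre>{@code
* region.compactStores(); // synchronously compacts each store with a pending request
* }</pre>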
*
* @throws IOException e
*/
public void compactStores() throws IOException {
for (Store s : getStores()) {
CompactionContext compaction = s.requestCompaction();
if (compaction != null) {
compact(compaction, s, NoLimitCompactionThroughputController.INSTANCE);
}
}
}
/**
* This is a helper function that compacts the given store synchronously.
* It is used by utilities and testing.
*
* @throws IOException e
*/
@VisibleForTesting
void compactStore(byte[] family, CompactionThroughputController throughputController)
throws IOException {
Store s = getStore(family);
CompactionContext compaction = s.requestCompaction();
if (compaction != null) {
compact(compaction, s, throughputController);
}
}
/*
* Called by compaction thread and after region is opened to compact the
* HStores if necessary.
*
* <p>This operation could block for a long time, so don't call it from a
* time-sensitive thread.
*
* Note that no locking is necessary at this level because compaction only
* conflicts with a region split, and that cannot happen because the region
* server does them sequentially and not in parallel.
*
* @param compaction Compaction details, obtained by requestCompaction()
* @return whether the compaction completed
*/
public boolean compact(CompactionContext compaction, Store store,
CompactionThroughputController throughputController) throws IOException {
assert compaction != null && compaction.hasSelection();
assert !compaction.getRequest().getFiles().isEmpty();
if (this.closing.get() || this.closed.get()) {
LOG.debug("Skipping compaction on " + this + " because closing/closed");
store.cancelRequestedCompaction(compaction);
return false;
}
MonitoredTask status = null;
boolean requestNeedsCancellation = true;
// block waiting for the lock for compaction
lock.readLock().lock();
try {
byte[] cf = Bytes.toBytes(store.getColumnFamilyName());
if (stores.get(cf) != store) {
LOG.warn("Store " + store.getColumnFamilyName() + " on region " + this
+ " has been re-instantiated, cancel this compaction request. "
+ " It may be caused by the roll back of split transaction");
return false;
}
status = TaskMonitor.get().createStatus("Compacting " + store + " in " + this);
if (this.closed.get()) {
String msg = "Skipping compaction on " + this + " because closed";
LOG.debug(msg);
status.abort(msg);
return false;
}
boolean wasStateSet = false;
try {
synchronized (writestate) {
if (writestate.writesEnabled) {
wasStateSet = true;
++writestate.compacting;
} else {
String msg = "NOT compacting region " + this + ". Writes disabled.";
LOG.info(msg);
status.abort(msg);
return false;
}
}
LOG.info("Starting compaction on " + store + " in region " + this
+ (compaction.getRequest().isOffPeak()?" as an off-peak compaction":""));
doRegionCompactionPrep();
try {
status.setStatus("Compacting store " + store);
// We no longer need to cancel the request on the way out of this
// method because Store#compact will clean up unconditionally
requestNeedsCancellation = false;
store.compact(compaction, throughputController);
} catch (InterruptedIOException iioe) {
String msg = "compaction interrupted";
LOG.info(msg, iioe);
status.abort(msg);
return false;
}
} finally {
if (wasStateSet) {
synchronized (writestate) {
--writestate.compacting;
if (writestate.compacting <= 0) {
writestate.notifyAll();
}
}
}
}
status.markComplete("Compaction complete");
return true;
} finally {
try {
if (requestNeedsCancellation) store.cancelRequestedCompaction(compaction);
if (status != null) status.cleanup();
} finally {
lock.readLock().unlock();
}
}
}
@Override
public FlushResult flush(boolean force) throws IOException {
return flushcache(force, false);
}
/**
* Flush the cache.
*
* When this method is called the cache will be flushed unless:
* <ol>
* <li>the cache is empty</li>
* <li>the region is closed.</li>
* <li>a flush is already in progress</li>
* <li>writes are disabled</li>
* </ol>
*
* <p>This method may block for some time, so it should not be called from a
* time-sensitive thread.
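* <p>A minimal usage sketch (assuming {@code region} is an open, writable HRegion):
* <pre>{@code
* FlushResult res = region.flushcache(true, false); // flush all stores, no request marker
* }</pre>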
* @param forceFlushAllStores whether we want to flush all stores
* @param writeFlushRequestWalMarker whether to write the flush request marker to WAL
* @return whether the flush succeeded and whether the region needs compacting
*
* @throws IOException general io exceptions
* @throws DroppedSnapshotException Thrown when replay of wal is required
* because a Snapshot was not properly persisted.
*/
public FlushResult flushcache(boolean forceFlushAllStores, boolean writeFlushRequestWalMarker)
throws IOException {
// fail-fast instead of waiting on the lock
if (this.closing.get()) {
String msg = "Skipping flush on " + this + " because closing";
LOG.debug(msg);
return new FlushResultImpl(FlushResult.Result.CANNOT_FLUSH, msg, false);
}
MonitoredTask status = TaskMonitor.get().createStatus("Flushing " + this);
status.setStatus("Acquiring readlock on region");
// block waiting for the lock for flushing cache
lock.readLock().lock();
try {
if (this.closed.get()) {
String msg = "Skipping flush on " + this + " because closed";
LOG.debug(msg);
status.abort(msg);
return new FlushResultImpl(FlushResult.Result.CANNOT_FLUSH, msg, false);
}
if (coprocessorHost != null) {
status.setStatus("Running coprocessor pre-flush hooks");
coprocessorHost.preFlush();
}
// TODO: this should be managed within memstore with the snapshot, updated only after flush
// successful
if (numMutationsWithoutWAL.get() > 0) {
numMutationsWithoutWAL.set(0);
dataInMemoryWithoutWAL.set(0);
}
synchronized (writestate) {
if (!writestate.flushing && writestate.writesEnabled) {
this.writestate.flushing = true;
} else {
if (LOG.isDebugEnabled()) {
LOG.debug("NOT flushing memstore for region " + this
+ ", flushing=" + writestate.flushing + ", writesEnabled="
+ writestate.writesEnabled);
}
String msg = "Not flushing since "
+ (writestate.flushing ? "already flushing"
: "writes not enabled");
status.abort(msg);
return new FlushResultImpl(FlushResult.Result.CANNOT_FLUSH, msg, false);
}
}
try {
Collection<Store> specificStoresToFlush =
forceFlushAllStores ? stores.values() : flushPolicy.selectStoresToFlush();
FlushResult fs = internalFlushcache(specificStoresToFlush,
status, writeFlushRequestWalMarker);
if (coprocessorHost != null) {
status.setStatus("Running post-flush coprocessor hooks");
coprocessorHost.postFlush();
}
status.markComplete("Flush successful");
return fs;
} finally {
synchronized (writestate) {
writestate.flushing = false;
this.writestate.flushRequested = false;
writestate.notifyAll();
}
}
} finally {
lock.readLock().unlock();
status.cleanup();
}
}
/**
* Should the store be flushed because it is old enough.
* <p>
* Every FlushPolicy should call this to determine whether a store is old enough to flush
* (except a policy that always flushes all stores). Otherwise the {@link #shouldFlush()}
* method will always return true, which will generate a lot of flush requests.
*/
boolean shouldFlushStore(Store store) {
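// The earliest unflushed sequence id for this store, minus one, is the store's highest
// flushed sequence id.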
long maxFlushedSeqId =
this.wal.getEarliestMemstoreSeqNum(getRegionInfo().getEncodedNameAsBytes(), store
.getFamily().getName()) - 1;
if (maxFlushedSeqId > 0 && maxFlushedSeqId + flushPerChanges < sequenceId.get()) {
if (LOG.isDebugEnabled()) {
LOG.debug("Column Family: " + store.getColumnFamilyName() + " of region " + this
+ " will be flushed because its max flushed seqId(" + maxFlushedSeqId
+ ") is far away from current(" + sequenceId.get() + "), max allowed is "
+ flushPerChanges);
}
return true;
}
if (flushCheckInterval <= 0) {
return false;
}
long now = EnvironmentEdgeManager.currentTime();
if (store.timeOfOldestEdit() < now - flushCheckInterval) {
if (LOG.isDebugEnabled()) {
LOG.debug("Column Family: " + store.getColumnFamilyName() + " of region " + this
+ " will be flushed because time of its oldest edit (" + store.timeOfOldestEdit()
+ ") is far away from now(" + now + "), max allowed is " + flushCheckInterval);
}
return true;
}
return false;
}
/**
* Should the memstore be flushed now
*/
boolean shouldFlush() {
// This is a rough measure.
if (this.maxFlushedSeqId > 0
&& (this.maxFlushedSeqId + this.flushPerChanges < this.sequenceId.get())) {
return true;
}
long modifiedFlushCheckInterval = flushCheckInterval;
if (getRegionInfo().isMetaRegion() &&
getRegionInfo().getReplicaId() == HRegionInfo.DEFAULT_REPLICA_ID) {
modifiedFlushCheckInterval = META_CACHE_FLUSH_INTERVAL;
}
if (modifiedFlushCheckInterval <= 0) { //disabled
return false;
}
long now = EnvironmentEdgeManager.currentTime();
//if we flushed in the recent past, we don't need to do it again now
if ((now - getEarliestFlushTimeForAllStores() < modifiedFlushCheckInterval)) {
return false;
}
//since we didn't flush in the recent past, flush now if certain conditions
//are met. Return true on first such memstore hit.
for (Store s : getStores()) {
if (s.timeOfOldestEdit() < now - modifiedFlushCheckInterval) {
// we have an old enough edit in the memstore, flush
return true;
}
}
return false;
}
/**
* Flushing all stores.
*
* @see #internalFlushcache(Collection, MonitoredTask, boolean)
*/
private FlushResult internalFlushcache(MonitoredTask status)
throws IOException {
return internalFlushcache(stores.values(), status, false);
}
/**
* Flushing given stores.
*
* @see #internalFlushcache(WAL, long, Collection, MonitoredTask, boolean)
*/
private FlushResult internalFlushcache(final Collection<Store> storesToFlush,
MonitoredTask status, boolean writeFlushWalMarker) throws IOException {
return internalFlushcache(this.wal, HConstants.NO_SEQNUM, storesToFlush,
status, writeFlushWalMarker);
}
/**
* Flush the memstore. Flushing the memstore is a little tricky. We have a lot
* of updates in the memstore, all of which have also been written to the wal.
* We need to write those updates in the memstore out to disk, while being
* able to process reads/writes as much as possible during the flush
* operation.
* <p>
* This method may block for some time. Every time you call it, we up the
* region's sequence id even if we don't flush; i.e. the returned sequence id
* will be at least one larger than the last edit applied to this region. The
* returned id does not refer to an actual edit. The returned id can be used
* for say installing a bulk loaded file just ahead of the last hfile that was
* the result of this flush, etc.
*
* @param wal
* Null if we're NOT to go via wal.
* @param myseqid
* The seqid to use if <code>wal</code> is null writing out flush
* file.
* @param storesToFlush
* The list of stores to flush.
* @return object describing the flush's state
* @throws IOException
* general io exceptions
* @throws DroppedSnapshotException
* Thrown when replay of wal is required because a Snapshot was not
* properly persisted.
*/
protected FlushResult internalFlushcache(final WAL wal, final long myseqid,
final Collection<Store> storesToFlush, MonitoredTask status, boolean writeFlushWalMarker)
throws IOException {
PrepareFlushResult result
= internalPrepareFlushCache(wal, myseqid, storesToFlush, status, writeFlushWalMarker);
if (result.result == null) {
return internalFlushCacheAndCommit(wal, status, result, storesToFlush);
} else {
return result.result; // early exit due to failure from prepare stage
}
}
protected PrepareFlushResult internalPrepareFlushCache(
final WAL wal, final long myseqid, final Collection<Store> storesToFlush,
MonitoredTask status, boolean writeFlushWalMarker)
throws IOException {
if (this.rsServices != null && this.rsServices.isAborted()) {
// Don't flush when server aborting, it's unsafe
throw new IOException("Aborting flush because server is aborted...");
}
final long startTime = EnvironmentEdgeManager.currentTime();
// If nothing to flush, return, but we need to safely update the region sequence id
if (this.memstoreSize.get() <= 0) {
// Take an update lock because we are about to change the sequence id and we want the
// sequence id to be at the border of the empty memstore.
MultiVersionConsistencyControl.WriteEntry w = null;
this.updatesLock.writeLock().lock();
try {
if (this.memstoreSize.get() <= 0) {
// Presume that if there are still no edits in the memstore, then there are no edits for
// this region out in the WAL subsystem so no need to do any trickery clearing out
// edits in the WAL system. Up the sequence number so the resulting flush id is for
// sure just beyond the last appended region edit (useful as a marker when bulk loading,
// etc.)
// wal can be null replaying edits.
if (wal != null) {
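// Begin an mvcc transaction so the bumped sequence id becomes visible to readers only
// after all earlier transactions complete.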
w = mvcc.beginMemstoreInsert();
long flushOpSeqId = getNextSequenceId(wal);
FlushResult flushResult = new FlushResultImpl(
FlushResult.Result.CANNOT_FLUSH_MEMSTORE_EMPTY, flushOpSeqId, "Nothing to flush",
writeFlushRequestMarkerToWAL(wal, writeFlushWalMarker));
w.setWriteNumber(flushOpSeqId);
mvcc.waitForPreviousTransactionsComplete(w);
w = null;
return new PrepareFlushResult(flushResult, myseqid);
} else {
return new PrepareFlushResult(
new FlushResultImpl(FlushResult.Result.CANNOT_FLUSH_MEMSTORE_EMPTY,
"Nothing to flush", false),
myseqid);
}
}
} finally {
this.updatesLock.writeLock().unlock();
if (w != null) {
mvcc.advanceMemstore(w);
}
}
}
if (LOG.isInfoEnabled()) {
LOG.info("Started memstore flush for " + this + ", current region memstore size "
+ StringUtils.byteDesc(this.memstoreSize.get()) + ", and " + storesToFlush.size() + "/"
+ stores.size() + " column families' memstores are being flushed."
+ ((wal != null) ? "" : "; wal is null, using passed sequenceid=" + myseqid));
// only log when we are not flushing all stores.
if (this.stores.size() > storesToFlush.size()) {
for (Store store: storesToFlush) {
LOG.info("Flushing Column Family: " + store.getColumnFamilyName()
+ " which was occupying "
+ StringUtils.byteDesc(store.getMemStoreSize()) + " of memstore.");
}
}
}
// Stop updates while we snapshot the memstore of all of these regions' stores. We only have
// to do this for a moment. It is quick. We also set the memstore size to zero here before we
// allow updates again so its value will represent the size of the updates received
// during flush
MultiVersionConsistencyControl.WriteEntry w = null;
// We have to take an update lock during snapshot, or else a write could end up in both snapshot
// and memstore (makes it difficult to do atomic rows then)
status.setStatus("Obtaining lock to block concurrent updates");
// block waiting for the lock for internal flush
this.updatesLock.writeLock().lock();
status.setStatus("Preparing to flush by snapshotting stores in " +
getRegionInfo().getEncodedName());
long totalFlushableSizeOfFlushableStores = 0;
Set<byte[]> flushedFamilyNames = new HashSet<byte[]>();
for (Store store: storesToFlush) {
flushedFamilyNames.add(store.getFamily().getName());
}
TreeMap<byte[], StoreFlushContext> storeFlushCtxs
= new TreeMap<byte[], StoreFlushContext>(Bytes.BYTES_COMPARATOR);
TreeMap<byte[], List<Path>> committedFiles = new TreeMap<byte[], List<Path>>(
Bytes.BYTES_COMPARATOR);
// The sequence id of this flush operation which is used to log FlushMarker and pass to
// createFlushContext to use as the store file's sequence id.
long flushOpSeqId = HConstants.NO_SEQNUM;
// The max flushed sequence id after this flush operation. Used as completeSequenceId which is
// passed to HMaster.
long flushedSeqId = HConstants.NO_SEQNUM;
byte[] encodedRegionName = getRegionInfo().getEncodedNameAsBytes();
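// WAL transaction id of the START_FLUSH marker; non-zero means the marker was written
// and an ABORT_FLUSH must be logged if the prepare stage fails.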
long trxId = 0;
try {
try {
w = mvcc.beginMemstoreInsert();
if (wal != null) {
if (!wal.startCacheFlush(encodedRegionName, flushedFamilyNames)) {
// This should never happen.
String msg = "Flush will not be started for ["
+ this.getRegionInfo().getEncodedName() + "] - because the WAL is closing.";
status.setStatus(msg);
return new PrepareFlushResult(
new FlushResultImpl(FlushResult.Result.CANNOT_FLUSH, msg, false),
myseqid);
}
flushOpSeqId = getNextSequenceId(wal);
long oldestUnflushedSeqId = wal.getEarliestMemstoreSeqNum(encodedRegionName);
// no oldestUnflushedSeqId means we flushed all stores.
// or the unflushed stores are all empty.
flushedSeqId = (oldestUnflushedSeqId == HConstants.NO_SEQNUM) ? flushOpSeqId
: oldestUnflushedSeqId - 1;
} else {
// use the provided sequence Id as WAL is not being used for this flush.
flushedSeqId = flushOpSeqId = myseqid;
}
for (Store s : storesToFlush) {
totalFlushableSizeOfFlushableStores += s.getFlushableSize();
storeFlushCtxs.put(s.getFamily().getName(), s.createFlushContext(flushOpSeqId));
committedFiles.put(s.getFamily().getName(), null); // for writing stores to WAL
}
// write the snapshot start to WAL
if (wal != null && !writestate.readOnly) {
FlushDescriptor desc = ProtobufUtil.toFlushDescriptor(FlushAction.START_FLUSH,
getRegionInfo(), flushOpSeqId, committedFiles);
// no sync. Sync is below where we do not hold the updates lock
trxId = WALUtil.writeFlushMarker(wal, this.htableDescriptor, getRegionInfo(),
desc, sequenceId, false);
}
// Prepare flush (take a snapshot)
for (StoreFlushContext flush : storeFlushCtxs.values()) {
flush.prepare();
}
} catch (IOException ex) {
if (wal != null) {
if (trxId > 0) { // check whether we have already written START_FLUSH to WAL
try {
FlushDescriptor desc = ProtobufUtil.toFlushDescriptor(FlushAction.ABORT_FLUSH,
getRegionInfo(), flushOpSeqId, committedFiles);
WALUtil.writeFlushMarker(wal, this.htableDescriptor, getRegionInfo(),
desc, sequenceId, false);
} catch (Throwable t) {
LOG.warn("Received unexpected exception trying to write ABORT_FLUSH marker to WAL:" +
StringUtils.stringifyException(t));
// ignore this since we will be aborting the RS with DSE.
}
}
// we have called wal.startCacheFlush(), now we have to abort it
wal.abortCacheFlush(this.getRegionInfo().getEncodedNameAsBytes());
throw ex; // let upper layers deal with it.
}
} finally {
this.updatesLock.writeLock().unlock();
}
String s = "Finished memstore snapshotting " + this +
", syncing WAL and waiting on mvcc, flushsize=" + totalFlushableSizeOfFlushableStores;
status.setStatus(s);
if (LOG.isTraceEnabled()) LOG.trace(s);
// sync unflushed WAL changes
// see HBASE-8208 for details
if (wal != null) {
try {
wal.sync(); // ensure that flush marker is sync'ed
} catch (IOException ioe) {
LOG.warn("Unexpected exception while wal.sync(), ignoring. Exception: "
+ StringUtils.stringifyException(ioe));
}
}
// wait for all in-progress transactions to commit to WAL before
// we can start the flush. This prevents
// uncommitted transactions from being written into HFiles.
// We have to block before we start the flush, otherwise keys that
// were removed via a rollbackMemstore could be written to Hfiles.
w.setWriteNumber(flushOpSeqId);
mvcc.waitForPreviousTransactionsComplete(w);
// set w to null to prevent mvcc.advanceMemstore from being called again inside finally block
w = null;
} finally {
if (w != null) {
// in case of failure just mark current w as complete
mvcc.advanceMemstore(w);
}
}
return new PrepareFlushResult(storeFlushCtxs, committedFiles, startTime, flushOpSeqId,
flushedSeqId, totalFlushableSizeOfFlushableStores);
}
/**
* Writes a marker to the WAL indicating a flush is requested but cannot be completed due to
* various reasons. Ignores exceptions from the WAL. Returns whether the write succeeded.
* @param wal the WAL to write the marker to
* @return whether the WAL write was successful
*/
private boolean writeFlushRequestMarkerToWAL(WAL wal, boolean writeFlushWalMarker) {
if (writeFlushWalMarker && wal != null && !writestate.readOnly) {
FlushDescriptor desc = ProtobufUtil.toFlushDescriptor(FlushAction.CANNOT_FLUSH,
getRegionInfo(), -1, new TreeMap<byte[], List<Path>>());
try {
WALUtil.writeFlushMarker(wal, this.htableDescriptor, getRegionInfo(),
desc, sequenceId, true);
return true;
} catch (IOException e) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Received exception while trying to write the flush request to wal", e);
}
}
return false;
}
protected FlushResult internalFlushCacheAndCommit(
final WAL wal, MonitoredTask status, final PrepareFlushResult prepareResult,
final Collection<Store> storesToFlush)
throws IOException {
// prepare flush context is carried via PrepareFlushResult
TreeMap<byte[], StoreFlushContext> storeFlushCtxs = prepareResult.storeFlushCtxs;
TreeMap<byte[], List<Path>> committedFiles = prepareResult.committedFiles;
long startTime = prepareResult.startTime;
long flushOpSeqId = prepareResult.flushOpSeqId;
long flushedSeqId = prepareResult.flushedSeqId;
long totalFlushableSizeOfFlushableStores = prepareResult.totalFlushableSize;
String s = "Flushing stores of " + this;
status.setStatus(s);
if (LOG.isTraceEnabled()) LOG.trace(s);
// Any failure from here on out will be catastrophic, requiring a server
// restart so wal content can be replayed and put back into the memstore.
// Otherwise, the snapshot content, while backed up in the wal, will not
// be part of the current running server's state.
boolean compactionRequested = false;
try {
// A. Flush memstore to all the HStores.
// Keep running vector of all store files that includes both old and the
// just-made new flush store file. The new flushed file is still in the
// tmp directory.
for (StoreFlushContext flush : storeFlushCtxs.values()) {
flush.flushCache(status);
}
// Switch snapshot (in memstore) -> new hfile (thus causing
// all the store scanners to reset/reseek).
Iterator<Store> it = storesToFlush.iterator();
// stores.values() and storeFlushCtxs have same order
for (StoreFlushContext flush : storeFlushCtxs.values()) {
boolean needsCompaction = flush.commit(status);
if (needsCompaction) {
compactionRequested = true;
}
committedFiles.put(it.next().getFamily().getName(), flush.getCommittedFiles());
}
storeFlushCtxs.clear();
// Set down the memstore size by amount of flush.
this.addAndGetGlobalMemstoreSize(-totalFlushableSizeOfFlushableStores);
if (wal != null) {
// write flush marker to WAL. If fail, we should throw DroppedSnapshotException
FlushDescriptor desc = ProtobufUtil.toFlushDescriptor(FlushAction.COMMIT_FLUSH,
getRegionInfo(), flushOpSeqId, committedFiles);
WALUtil.writeFlushMarker(wal, this.htableDescriptor, getRegionInfo(),
desc, sequenceId, true);
}
} catch (Throwable t) {
// An exception here means that the snapshot was not persisted.
// The wal needs to be replayed so its content is restored to memstore.
// Currently, only a server restart will do this.
// We used to only catch IOEs but its possible that we'd get other
// exceptions -- e.g. HBASE-659 was about an NPE -- so now we catch
// all and sundry.
if (wal != null) {
try {
FlushDescriptor desc = ProtobufUtil.toFlushDescriptor(FlushAction.ABORT_FLUSH,
getRegionInfo(), flushOpSeqId, committedFiles);
WALUtil.writeFlushMarker(wal, this.htableDescriptor, getRegionInfo(),
desc, sequenceId, false);
} catch (Throwable ex) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Received unexpected exception trying to write ABORT_FLUSH marker to WAL:"
+ StringUtils.stringifyException(ex));
// ignore this since we will be aborting the RS with DSE.
}
wal.abortCacheFlush(this.getRegionInfo().getEncodedNameAsBytes());
}
DroppedSnapshotException dse = new DroppedSnapshotException("region: " +
Bytes.toStringBinary(getRegionInfo().getRegionName()));
dse.initCause(t);
status.abort("Flush failed: " + StringUtils.stringifyException(t));
throw dse;
}
// If we get to here, the HStores have been written.
if (wal != null) {
wal.completeCacheFlush(this.getRegionInfo().getEncodedNameAsBytes());
}
// Record latest flush time
for (Store store: storesToFlush) {
this.lastStoreFlushTimeMap.put(store, startTime);
}
// Update the oldest unflushed sequence id for region.
this.maxFlushedSeqId = flushedSeqId;
// Record flush operation sequence id.
this.lastFlushOpSeqId = flushOpSeqId;
// C. Finally notify anyone waiting on memstore to clear:
// e.g. checkResources().
synchronized (this) {
notifyAll(); // FindBugs NN_NAKED_NOTIFY
}
long time = EnvironmentEdgeManager.currentTime() - startTime;
long memstoresize = this.memstoreSize.get();
String msg = "Finished memstore flush of ~"
+ StringUtils.byteDesc(totalFlushableSizeOfFlushableStores) + "/"
+ totalFlushableSizeOfFlushableStores + ", currentsize="
+ StringUtils.byteDesc(memstoresize) + "/" + memstoresize
+ " for region " + this + " in " + time + "ms, sequenceid="
+ flushOpSeqId + ", compaction requested=" + compactionRequested
+ ((wal == null) ? "; wal=null" : "");
LOG.info(msg);
status.setStatus(msg);
return new FlushResultImpl(compactionRequested ?
FlushResult.Result.FLUSHED_COMPACTION_NEEDED :
FlushResult.Result.FLUSHED_NO_COMPACTION_NEEDED,
flushOpSeqId);
}
/**
* Method to safely get the next sequence number.
* @return Next sequence number unassociated with any actual edit.
* @throws IOException
*/
@VisibleForTesting
protected long getNextSequenceId(final WAL wal) throws IOException {
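// Appending an empty edit advances the WAL sequence number without adding any data.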
WALKey key = this.appendEmptyEdit(wal, null);
return key.getSequenceId();
}
//////////////////////////////////////////////////////////////////////////////
// get() methods for client use.
//////////////////////////////////////////////////////////////////////////////
@Override
public Result getClosestRowBefore(final byte [] row, final byte [] family) throws IOException {
if (coprocessorHost != null) {
Result result = new Result();
if (coprocessorHost.preGetClosestRowBefore(row, family, result)) {
return result;
}
}
// look across all the HStores for this region and determine what the
// closest key is across all column families, since the data may be sparse
checkRow(row, "getClosestRowBefore");
startRegionOperation(Operation.GET);
this.readRequestsCount.increment();
try {
Store store = getStore(family);
// get the closest key. (HStore.getRowKeyAtOrBefore can return null)
Cell key = store.getRowKeyAtOrBefore(row);
Result result = null;
if (key != null) {
Get get = new Get(CellUtil.cloneRow(key));
get.addFamily(family);
result = get(get);
}
if (coprocessorHost != null) {
coprocessorHost.postGetClosestRowBefore(row, family, result);
}
return result;
} finally {
closeRegionOperation(Operation.GET);
}
}
@Override
public RegionScanner getScanner(Scan scan) throws IOException {
return getScanner(scan, null);
}
protected RegionScanner getScanner(Scan scan,
List<KeyValueScanner> additionalScanners) throws IOException {
startRegionOperation(Operation.SCAN);
try {
// Verify families are all valid
if (!scan.hasFamilies()) {
// Adding all families to scanner
for (byte[] family: this.htableDescriptor.getFamiliesKeys()) {
scan.addFamily(family);
}
} else {
for (byte [] family : scan.getFamilyMap().keySet()) {
checkFamily(family);
}
}
return instantiateRegionScanner(scan, additionalScanners);
} finally {
closeRegionOperation(Operation.SCAN);
}
}
protected RegionScanner instantiateRegionScanner(Scan scan,
List<KeyValueScanner> additionalScanners) throws IOException {
if (scan.isReversed()) {
if (scan.getFilter() != null) {
scan.getFilter().setReversed(true);
}
return new ReversedRegionScannerImpl(scan, additionalScanners, this);
}
return new RegionScannerImpl(scan, additionalScanners, this);
}
@Override
public void prepareDelete(Delete delete) throws IOException {
// Check to see if this is a deleteRow insert
if(delete.getFamilyCellMap().isEmpty()){
for(byte [] family : this.htableDescriptor.getFamiliesKeys()){
// Don't eat the timestamp
delete.addFamily(family, delete.getTimeStamp());
}
} else {
for(byte [] family : delete.getFamilyCellMap().keySet()) {
if(family == null) {
throw new NoSuchColumnFamilyException("Empty family is invalid");
}
checkFamily(family);
}
}
}
@Override
public void delete(Delete delete) throws IOException {
checkReadOnly();
checkResources();
startRegionOperation(Operation.DELETE);
try {
delete.getRow();
// All edits for the given row (across all column families) must happen atomically.
doBatchMutate(delete);
} finally {
closeRegionOperation(Operation.DELETE);
}
}
/**
* Row key used only by the unit-test delete method below.
*/
private static final byte [] FOR_UNIT_TESTS_ONLY = Bytes.toBytes("ForUnitTestsOnly");
/**
* This is used only by unit tests. Not required to be a public API.
* @param familyMap map of family to edits for the given family.
* @throws IOException
*/
void delete(NavigableMap<byte[], List<Cell>> familyMap,
Durability durability) throws IOException {
Delete delete = new Delete(FOR_UNIT_TESTS_ONLY);
delete.setFamilyCellMap(familyMap);
delete.setDurability(durability);
doBatchMutate(delete);
}
@Override
public void prepareDeleteTimestamps(Mutation mutation, Map<byte[], List<Cell>> familyMap,
byte[] byteNow) throws IOException {
for (Map.Entry<byte[], List<Cell>> e : familyMap.entrySet()) {
byte[] family = e.getKey();
List<Cell> cells = e.getValue();
assert cells instanceof RandomAccess;
Map<byte[], Integer> kvCount = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
int listSize = cells.size();
for (int i=0; i < listSize; i++) {
Cell cell = cells.get(i);
// Check if time is LATEST, change to time of most recent addition if so
// This is expensive.
if (cell.getTimestamp() == HConstants.LATEST_TIMESTAMP && CellUtil.isDeleteType(cell)) {
byte[] qual = CellUtil.cloneQualifier(cell);
if (qual == null) qual = HConstants.EMPTY_BYTE_ARRAY;
Integer count = kvCount.get(qual);
if (count == null) {
kvCount.put(qual, 1);
} else {
kvCount.put(qual, count + 1);
}
count = kvCount.get(qual);
Get get = new Get(CellUtil.cloneRow(cell));
get.setMaxVersions(count);
get.addColumn(family, qual);
if (coprocessorHost != null) {
if (!coprocessorHost.prePrepareTimeStampForDeleteVersion(mutation, cell,
byteNow, get)) {
updateDeleteLatestVersionTimeStamp(cell, get, count, byteNow);
}
} else {
updateDeleteLatestVersionTimeStamp(cell, get, count, byteNow);
}
} else {
CellUtil.updateLatestStamp(cell, byteNow, 0);
}
}
}
}
void updateDeleteLatestVersionTimeStamp(Cell cell, Get get, int count, byte[] byteNow)
throws IOException {
List<Cell> result = get(get, false);
if (result.size() < count) {
// Nothing to delete
CellUtil.updateLatestStamp(cell, byteNow, 0);
return;
}
if (result.size() > count) {
throw new RuntimeException("Unexpected size: " + result.size());
}
Cell getCell = result.get(count - 1);
CellUtil.setTimestamp(cell, getCell.getTimestamp());
}
@Override
public void put(Put put) throws IOException {
checkReadOnly();
// Do a rough check that we have resources to accept a write. The check is
// 'rough' in that between the resource check and the call to obtain a
// read lock, resources may run out. For now, the thought is that this
// will be extremely rare; we'll deal with it when it happens.
checkResources();
startRegionOperation(Operation.PUT);
try {
// All edits for the given row (across all column families) must happen atomically.
doBatchMutate(put);
} finally {
closeRegionOperation(Operation.PUT);
}
}
/**
* Struct-like class that tracks the progress of a batch operation,
* accumulating status codes and tracking the index at which processing
* is proceeding.
*/
private abstract static class BatchOperationInProgress<T> {
T[] operations;
int nextIndexToProcess = 0;
OperationStatus[] retCodeDetails;
WALEdit[] walEditsFromCoprocessors;
public BatchOperationInProgress(T[] operations) {
this.operations = operations;
this.retCodeDetails = new OperationStatus[operations.length];
this.walEditsFromCoprocessors = new WALEdit[operations.length];
Arrays.fill(this.retCodeDetails, OperationStatus.NOT_RUN);
}
public abstract Mutation getMutation(int index);
public abstract long getNonceGroup(int index);
public abstract long getNonce(int index);
/** This method is potentially expensive and should only be used for the non-replay CP path. */
public abstract Mutation[] getMutationsForCoprocs();
public abstract boolean isInReplay();
public abstract long getReplaySequenceId();
public boolean isDone() {
return nextIndexToProcess == operations.length;
}
}
private static class MutationBatch extends BatchOperationInProgress<Mutation> {
private long nonceGroup;
private long nonce;
public MutationBatch(Mutation[] operations, long nonceGroup, long nonce) {
super(operations);
this.nonceGroup = nonceGroup;
this.nonce = nonce;
}
@Override
public Mutation getMutation(int index) {
return this.operations[index];
}
@Override
public long getNonceGroup(int index) {
return nonceGroup;
}
@Override
public long getNonce(int index) {
return nonce;
}
@Override
public Mutation[] getMutationsForCoprocs() {
return this.operations;
}
@Override
public boolean isInReplay() {
return false;
}
@Override
public long getReplaySequenceId() {
return 0;
}
}
private static class ReplayBatch extends BatchOperationInProgress<MutationReplay> {
private long replaySeqId = 0;
public ReplayBatch(MutationReplay[] operations, long seqId) {
super(operations);
this.replaySeqId = seqId;
}
@Override
public Mutation getMutation(int index) {
return this.operations[index].mutation;
}
@Override
public long getNonceGroup(int index) {
return this.operations[index].nonceGroup;
}
@Override
public long getNonce(int index) {
return this.operations[index].nonce;
}
@Override
public Mutation[] getMutationsForCoprocs() {
assert false;
throw new RuntimeException("Should not be called for replay batch");
}
@Override
public boolean isInReplay() {
return true;
}
@Override
public long getReplaySequenceId() {
return this.replaySeqId;
}
}
@Override
public OperationStatus[] batchMutate(Mutation[] mutations, long nonceGroup, long nonce)
throws IOException {
// As it stands, this is used for two things:
// * batchMutate with single mutation - put/delete, separate or from checkAndMutate.
// * coprocessor calls (see ex. BulkDeleteEndpoint).
// So nonces are not really ever used by HBase. They could be by coprocs, and checkAnd...
return batchMutate(new MutationBatch(mutations, nonceGroup, nonce));
}
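/**
 * Perform a batch of mutations without nonces.
 * <p>A minimal usage sketch (hypothetical row, family and qualifier names):
 * <pre>{@code
 * Put put = new Put(Bytes.toBytes("row1"));
 * put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("value"));
 * OperationStatus[] statuses = region.batchMutate(new Mutation[] { put });
 * }</pre>
 */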
public OperationStatus[] batchMutate(Mutation[] mutations) throws IOException {
return batchMutate(mutations, HConstants.NO_NONCE, HConstants.NO_NONCE);
}
@Override
public OperationStatus[] batchReplay(MutationReplay[] mutations, long replaySeqId)
throws IOException {
if (!RegionReplicaUtil.isDefaultReplica(getRegionInfo())
&& replaySeqId < lastReplayedOpenRegionSeqId) {
// if it is a secondary replica we should ignore these entries silently
// since they are coming out of order
if (LOG.isTraceEnabled()) {
LOG.trace(getRegionInfo().getEncodedName() + " : "
+ "Skipping " + mutations.length + " mutations with replaySeqId=" + replaySeqId
+ " which is < than lastReplayedOpenRegionSeqId=" + lastReplayedOpenRegionSeqId);
for (MutationReplay mut : mutations) {
LOG.trace(getRegionInfo().getEncodedName() + " : Skipping : " + mut.mutation);
}
}
OperationStatus[] statuses = new OperationStatus[mutations.length];
for (int i = 0; i < statuses.length; i++) {
statuses[i] = OperationStatus.SUCCESS;
}
return statuses;
}
return batchMutate(new ReplayBatch(mutations, replaySeqId));
}
/**
* Perform a batch of mutations.
* It supports only Put and Delete mutations and will ignore other types passed.
* @param batchOp contains the list of mutations
* @return an array of OperationStatus which internally contains the
* OperationStatusCode and the exceptionMessage if any.
* @throws IOException
*/
OperationStatus[] batchMutate(BatchOperationInProgress<?> batchOp) throws IOException {
boolean initialized = false;
Operation op = batchOp.isInReplay() ? Operation.REPLAY_BATCH_MUTATE : Operation.BATCH_MUTATE;
startRegionOperation(op);
try {
while (!batchOp.isDone()) {
if (!batchOp.isInReplay()) {
checkReadOnly();
}
checkResources();
if (!initialized) {
this.writeRequestsCount.add(batchOp.operations.length);
if (!batchOp.isInReplay()) {
doPreMutationHook(batchOp);
}
initialized = true;
}
long addedSize = doMiniBatchMutation(batchOp);
long newSize = this.addAndGetGlobalMemstoreSize(addedSize);
if (isFlushSize(newSize)) {
requestFlush();
}
}
} finally {
closeRegionOperation(op);
}
return batchOp.retCodeDetails;
}
private void doPreMutationHook(BatchOperationInProgress<?> batchOp)
throws IOException {
/* Run coprocessor pre hook outside of locks to avoid deadlock */
WALEdit walEdit = new WALEdit();
if (coprocessorHost != null) {
for (int i = 0 ; i < batchOp.operations.length; i++) {
Mutation m = batchOp.getMutation(i);
if (m instanceof Put) {
if (coprocessorHost.prePut((Put) m, walEdit, m.getDurability())) {
// pre hook says skip this Put
// mark as success and skip in doMiniBatchMutation
batchOp.retCodeDetails[i] = OperationStatus.SUCCESS;
}
} else if (m instanceof Delete) {
Delete curDel = (Delete) m;
if (curDel.getFamilyCellMap().isEmpty()) {
// handle deleting a row case
prepareDelete(curDel);
}
if (coprocessorHost.preDelete(curDel, walEdit, m.getDurability())) {
// pre hook says skip this Delete
// mark as success and skip in doMiniBatchMutation
batchOp.retCodeDetails[i] = OperationStatus.SUCCESS;
}
} else {
// In case of passing Append mutations along with the Puts and Deletes in batchMutate
// mark the operation return code as failure so that it will not be considered in
// the doMiniBatchMutation
batchOp.retCodeDetails[i] = new OperationStatus(OperationStatusCode.FAILURE,
"Put/Delete mutations only supported in batchMutate() now");
}
if (!walEdit.isEmpty()) {
batchOp.walEditsFromCoprocessors[i] = walEdit;
walEdit = new WALEdit();
}
}
}
}
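/*
 * For context, a minimal sketch (assuming the standard RegionObserver API;
 * the class and attribute names are hypothetical) of a coprocessor pre-hook
 * that makes the prePut() call above return true, so the Put is marked
 * SUCCESS here and skipped in doMiniBatchMutation:
 *
 *   public class SkipMarkedPutsObserver extends BaseRegionObserver {
 *     @Override
 *     public void prePut(ObserverContext<RegionCoprocessorEnvironment> c,
 *         Put put, WALEdit edit, Durability durability) {
 *       if (put.getAttribute("skip-me") != null) {
 *         c.bypass(); // coprocessorHost.prePut(...) then reports true
 *       }
 *     }
 *   }
 */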
@SuppressWarnings("unchecked")
private long doMiniBatchMutation(BatchOperationInProgress<?> batchOp) throws IOException {
boolean isInReplay = batchOp.isInReplay();
// variable to note if all Put items are for the same CF -- metrics related
boolean putsCfSetConsistent = true;
//The set of columnFamilies first seen for Put.
Set<byte[]> putsCfSet = null;
// variable to note if all Delete items are for the same CF -- metrics related
boolean deletesCfSetConsistent = true;
//The set of columnFamilies first seen for Delete.
Set<byte[]> deletesCfSet = null;
long currentNonceGroup = HConstants.NO_NONCE, currentNonce = HConstants.NO_NONCE;
WALEdit walEdit = new WALEdit(isInReplay);
MultiVersionConsistencyControl.WriteEntry w = null;
long txid = 0;
boolean doRollBackMemstore = false;
boolean locked = false;
/** Keep track of the locks we hold so we can release them in finally clause */
List<RowLock> acquiredRowLocks = Lists.newArrayListWithCapacity(batchOp.operations.length);
// reference family maps directly so coprocessors can mutate them if desired
Map<byte[], List<Cell>>[] familyMaps = new Map[batchOp.operations.length];
List<Cell> memstoreCells = new ArrayList<Cell>();
// We try to set up a batch in the range [firstIndex,lastIndexExclusive)
int firstIndex = batchOp.nextIndexToProcess;
int lastIndexExclusive = firstIndex;
boolean success = false;
int noOfPuts = 0, noOfDeletes = 0;
WALKey walKey = null;
long mvccNum = 0;
try {
// ------------------------------------
// STEP 1. Try to acquire as many locks as we can, and ensure
// we acquire at least one.
// ----------------------------------
int numReadyToWrite = 0;
long now = EnvironmentEdgeManager.currentTime();
while (lastIndexExclusive < batchOp.operations.length) {
Mutation mutation = batchOp.getMutation(lastIndexExclusive);
boolean isPutMutation = mutation instanceof Put;
Map<byte[], List<Cell>> familyMap = mutation.getFamilyCellMap();
// store the family map reference to allow for mutations
familyMaps[lastIndexExclusive] = familyMap;
// skip anything that "ran" already
if (batchOp.retCodeDetails[lastIndexExclusive].getOperationStatusCode()
!= OperationStatusCode.NOT_RUN) {
lastIndexExclusive++;
continue;
}
try {
if (isPutMutation) {
// Check the families in the put. If bad, skip this one.
if (isInReplay) {
removeNonExistentColumnFamilyForReplay(familyMap);
} else {
checkFamilies(familyMap.keySet());
}
checkTimestamps(mutation.getFamilyCellMap(), now);
} else {
prepareDelete((Delete) mutation);
}
checkRow(mutation.getRow(), "doMiniBatchMutation");
} catch (NoSuchColumnFamilyException nscf) {
LOG.warn("No such column family in batch mutation", nscf);
batchOp.retCodeDetails[lastIndexExclusive] = new OperationStatus(
OperationStatusCode.BAD_FAMILY, nscf.getMessage());
lastIndexExclusive++;
continue;
} catch (FailedSanityCheckException fsce) {
LOG.warn("Batch Mutation did not pass sanity check", fsce);
batchOp.retCodeDetails[lastIndexExclusive] = new OperationStatus(
OperationStatusCode.SANITY_CHECK_FAILURE, fsce.getMessage());
lastIndexExclusive++;
continue;
} catch (WrongRegionException we) {
LOG.warn("Batch mutation had a row that does not belong to this region", we);
batchOp.retCodeDetails[lastIndexExclusive] = new OperationStatus(
OperationStatusCode.SANITY_CHECK_FAILURE, we.getMessage());
lastIndexExclusive++;
continue;
}
// If we haven't got any rows in our batch, we should block to
// get the next one.
boolean shouldBlock = numReadyToWrite == 0;
RowLock rowLock = null;
try {
rowLock = getRowLockInternal(mutation.getRow(), shouldBlock);
} catch (IOException ioe) {
LOG.warn("Failed getting lock in batch put, row="
+ Bytes.toStringBinary(mutation.getRow()), ioe);
}
if (rowLock == null) {
// We failed to grab another lock
assert !shouldBlock : "Should never fail to get lock when blocking";
break; // stop acquiring more rows for this batch
} else {
acquiredRowLocks.add(rowLock);
}
lastIndexExclusive++;
numReadyToWrite++;
if (isPutMutation) {
// If column families stay consistent throughout all of the
// individual puts then metrics can be reported as a multiput across
// column families in the first put.
if (putsCfSet == null) {
putsCfSet = mutation.getFamilyCellMap().keySet();
} else {
putsCfSetConsistent = putsCfSetConsistent
&& mutation.getFamilyCellMap().keySet().equals(putsCfSet);
}
} else {
if (deletesCfSet == null) {
deletesCfSet = mutation.getFamilyCellMap().keySet();
} else {
deletesCfSetConsistent = deletesCfSetConsistent
&& mutation.getFamilyCellMap().keySet().equals(deletesCfSet);
}
}
}
// we should record the timestamp only after we have acquired the rowLock,
// otherwise, newer puts/deletes are not guaranteed to have a newer timestamp
now = EnvironmentEdgeManager.currentTime();
byte[] byteNow = Bytes.toBytes(now);
// Nothing to put/delete -- an exception in the above such as NoSuchColumnFamily?
if (numReadyToWrite <= 0) return 0L;
// We've now grabbed as many mutations off the list as we can
// ------------------------------------
// STEP 2. Update any LATEST_TIMESTAMP timestamps
// ----------------------------------
for (int i = firstIndex; !isInReplay && i < lastIndexExclusive; i++) {
// skip invalid
if (batchOp.retCodeDetails[i].getOperationStatusCode()
!= OperationStatusCode.NOT_RUN) continue;
Mutation mutation = batchOp.getMutation(i);
if (mutation instanceof Put) {
updateCellTimestamps(familyMaps[i].values(), byteNow);
noOfPuts++;
} else {
prepareDeleteTimestamps(mutation, familyMaps[i], byteNow);
noOfDeletes++;
}
rewriteCellTags(familyMaps[i], mutation);
}
lock(this.updatesLock.readLock(), numReadyToWrite);
locked = true;
if(isInReplay) {
mvccNum = batchOp.getReplaySequenceId();
} else {
mvccNum = MultiVersionConsistencyControl.getPreAssignedWriteNumber(this.sequenceId);
}
// ------------------------------------
// Acquire the latest mvcc number
// ----------------------------------
w = mvcc.beginMemstoreInsertWithSeqNum(mvccNum);
// calling the pre CP hook for batch mutation
if (!isInReplay && coprocessorHost != null) {
MiniBatchOperationInProgress<Mutation> miniBatchOp =
new MiniBatchOperationInProgress<Mutation>(batchOp.getMutationsForCoprocs(),
batchOp.retCodeDetails, batchOp.walEditsFromCoprocessors, firstIndex, lastIndexExclusive);
if (coprocessorHost.preBatchMutate(miniBatchOp)) return 0L;
}
// ------------------------------------
// STEP 3. Write back to memstore
// Write to memstore. It is ok to write to memstore
// first without updating the WAL because we do not roll
// forward the memstore MVCC. The MVCC will be moved up when
// the complete operation is done. These changes are not yet
// visible to scanners till we update the MVCC. The MVCC is
// moved only when the sync is complete.
// ----------------------------------
long addedSize = 0;
for (int i = firstIndex; i < lastIndexExclusive; i++) {
if (batchOp.retCodeDetails[i].getOperationStatusCode()
!= OperationStatusCode.NOT_RUN) {
continue;
}
doRollBackMemstore = true; // If we have a failure, we need to clean what we wrote
addedSize += applyFamilyMapToMemstore(familyMaps[i], mvccNum, memstoreCells, isInReplay);
}
// ------------------------------------
// STEP 4. Build WAL edit
// ----------------------------------
Durability durability = Durability.USE_DEFAULT;
for (int i = firstIndex; i < lastIndexExclusive; i++) {
// Skip mutations that were determined to be invalid during preprocessing
if (batchOp.retCodeDetails[i].getOperationStatusCode()
!= OperationStatusCode.NOT_RUN) {
continue;
}
batchOp.retCodeDetails[i] = OperationStatus.SUCCESS;
Mutation m = batchOp.getMutation(i);
Durability tmpDur = getEffectiveDurability(m.getDurability());
if (tmpDur.ordinal() > durability.ordinal()) {
durability = tmpDur;
}
if (tmpDur == Durability.SKIP_WAL) {
recordMutationWithoutWal(m.getFamilyCellMap());
continue;
}
long nonceGroup = batchOp.getNonceGroup(i), nonce = batchOp.getNonce(i);
// In replay, the batch may contain multiple nonces. If so, write WALEdit for each.
// Given how nonces are originally written, these should be contiguous.
// They don't have to be; it will still work, just writing more WALEdits than needed.
if (nonceGroup != currentNonceGroup || nonce != currentNonce) {
if (walEdit.size() > 0) {
assert isInReplay;
if (!isInReplay) {
throw new IOException("Multiple nonces per batch and not in replay");
}
// txid should always increase, so having the one from the last call is ok.
// we use HLogKey here instead of WALKey directly to support legacy coprocessors.
walKey = new ReplayHLogKey(this.getRegionInfo().getEncodedNameAsBytes(),
this.htableDescriptor.getTableName(), now, m.getClusterIds(),
currentNonceGroup, currentNonce);
txid = this.wal.append(this.htableDescriptor, this.getRegionInfo(), walKey,
walEdit, getSequenceId(), true, null);
walEdit = new WALEdit(isInReplay);
walKey = null;
}
currentNonceGroup = nonceGroup;
currentNonce = nonce;
}
// Add WAL edits by CP
WALEdit fromCP = batchOp.walEditsFromCoprocessors[i];
if (fromCP != null) {
for (Cell cell : fromCP.getCells()) {
walEdit.add(cell);
}
}
addFamilyMapToWALEdit(familyMaps[i], walEdit);
}
// -------------------------
// STEP 5. Append the final edit to WAL. Do not sync wal.
// -------------------------
Mutation mutation = batchOp.getMutation(firstIndex);
if (isInReplay) {
// use wal key from the original
walKey = new ReplayHLogKey(this.getRegionInfo().getEncodedNameAsBytes(),
this.htableDescriptor.getTableName(), WALKey.NO_SEQUENCE_ID, now,
mutation.getClusterIds(), currentNonceGroup, currentNonce);
long replaySeqId = batchOp.getReplaySequenceId();
walKey.setOrigLogSeqNum(replaySeqId);
// ensure that the sequence id of the region is at least as big as orig log seq id
while (true) {
long seqId = getSequenceId().get();
if (seqId >= replaySeqId) break;
if (getSequenceId().compareAndSet(seqId, replaySeqId)) break;
}
}
if (walEdit.size() > 0) {
if (!isInReplay) {
// we use HLogKey here instead of WALKey directly to support legacy coprocessors.
walKey = new HLogKey(this.getRegionInfo().getEncodedNameAsBytes(),
this.htableDescriptor.getTableName(), WALKey.NO_SEQUENCE_ID, now,
mutation.getClusterIds(), currentNonceGroup, currentNonce);
}
txid = this.wal.append(this.htableDescriptor, this.getRegionInfo(), walKey, walEdit,
getSequenceId(), true, memstoreCells);
}
if(walKey == null){
// Append a faked WALEdit in order for SKIP_WAL updates to get mvcc assigned
walKey = this.appendEmptyEdit(this.wal, memstoreCells);
}
// -------------------------------
// STEP 6. Release row locks, etc.
// -------------------------------
if (locked) {
this.updatesLock.readLock().unlock();
locked = false;
}
releaseRowLocks(acquiredRowLocks);
// -------------------------
// STEP 7. Sync wal.
// -------------------------
if (txid != 0) {
syncOrDefer(txid, durability);
}
doRollBackMemstore = false;
// calling the post CP hook for batch mutation
if (!isInReplay && coprocessorHost != null) {
MiniBatchOperationInProgress<Mutation> miniBatchOp =
new MiniBatchOperationInProgress<Mutation>(batchOp.getMutationsForCoprocs(),
batchOp.retCodeDetails, batchOp.walEditsFromCoprocessors, firstIndex, lastIndexExclusive);
coprocessorHost.postBatchMutate(miniBatchOp);
}
// ------------------------------------------------------------------
// STEP 8. Advance mvcc. This will make these mutations visible to scanners and getters.
// ------------------------------------------------------------------
if (w != null) {
mvcc.completeMemstoreInsertWithSeqNum(w, walKey);
w = null;
}
// ------------------------------------
// STEP 9. Run coprocessor post hooks. This should be done after the wal is
// synced so that the coprocessor contract is adhered to.
// ------------------------------------
if (!isInReplay && coprocessorHost != null) {
for (int i = firstIndex; i < lastIndexExclusive; i++) {
// only for successful operations
if (batchOp.retCodeDetails[i].getOperationStatusCode()
!= OperationStatusCode.SUCCESS) {
continue;
}
Mutation m = batchOp.getMutation(i);
if (m instanceof Put) {
coprocessorHost.postPut((Put) m, walEdit, m.getDurability());
} else {
coprocessorHost.postDelete((Delete) m, walEdit, m.getDurability());
}
}
}
success = true;
return addedSize;
} finally {
// if the wal sync was unsuccessful, remove keys from memstore
if (doRollBackMemstore) {
rollbackMemstore(memstoreCells);
}
if (w != null) {
mvcc.completeMemstoreInsertWithSeqNum(w, walKey);
}
if (locked) {
this.updatesLock.readLock().unlock();
}
releaseRowLocks(acquiredRowLocks);
// See if the column families were consistent through the whole thing.
// if they were then keep them. If they were not then pass a null.
// null will be treated as unknown.
// Total time taken might be involving Puts and Deletes.
// Split the time for puts and deletes based on the total number of Puts and Deletes.
if (noOfPuts > 0) {
// There were some Puts in the batch.
if (this.metricsRegion != null) {
this.metricsRegion.updatePut();
}
}
if (noOfDeletes > 0) {
// There were some Deletes in the batch.
if (this.metricsRegion != null) {
this.metricsRegion.updateDelete();
}
}
if (!success) {
for (int i = firstIndex; i < lastIndexExclusive; i++) {
if (batchOp.retCodeDetails[i].getOperationStatusCode() == OperationStatusCode.NOT_RUN) {
batchOp.retCodeDetails[i] = OperationStatus.FAILURE;
}
}
}
if (coprocessorHost != null && !batchOp.isInReplay()) {
// call the coprocessor hook to do any finalization steps
// after the batch operation is done
MiniBatchOperationInProgress<Mutation> miniBatchOp =
new MiniBatchOperationInProgress<Mutation>(batchOp.getMutationsForCoprocs(),
batchOp.retCodeDetails, batchOp.walEditsFromCoprocessors, firstIndex,
lastIndexExclusive);
coprocessorHost.postBatchMutateIndispensably(miniBatchOp, success);
}
batchOp.nextIndexToProcess = lastIndexExclusive;
}
}
/**
* Returns effective durability from the passed durability and
* the table descriptor.
*/
protected Durability getEffectiveDurability(Durability d) {
return d == Durability.USE_DEFAULT ? this.durability : d;
}
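/*
 * For example, if the table descriptor resolved this.durability to
 * ASYNC_WAL:
 *
 *   getEffectiveDurability(Durability.USE_DEFAULT) -> ASYNC_WAL
 *   getEffectiveDurability(Durability.SKIP_WAL)    -> SKIP_WAL
 */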
// TODO: gets/puts and deletes should be refactored a bit so that row lock
// acquisition happens beforehand and the lock is simply passed into these
// methods. In the case of checkAndMutate you could then just do lockRow,
// get, put, unlockRow or something similar.
@Override
public boolean checkAndMutate(byte [] row, byte [] family, byte [] qualifier,
CompareOp compareOp, ByteArrayComparable comparator, Mutation w,
boolean writeToWAL)
throws IOException{
checkReadOnly();
// TODO: add a check for value length, or maybe even better, move this to the
// client if this becomes a global setting.
checkResources();
boolean isPut = w instanceof Put;
if (!isPut && !(w instanceof Delete))
throw new org.apache.hadoop.hbase.DoNotRetryIOException("Action must " +
"be Put or Delete");
if (!Bytes.equals(row, w.getRow())) {
throw new org.apache.hadoop.hbase.DoNotRetryIOException("Action's " +
"getRow must match the passed row");
}
startRegionOperation();
try {
Get get = new Get(row);
checkFamily(family);
get.addColumn(family, qualifier);
// Lock row - note that doBatchMutate will relock this row if called
RowLock rowLock = getRowLock(get.getRow());
// wait for all previous transactions to complete (with lock held)
mvcc.waitForPreviousTransactionsComplete();
try {
if (this.getCoprocessorHost() != null) {
Boolean processed = null;
if (w instanceof Put) {
processed = this.getCoprocessorHost().preCheckAndPutAfterRowLock(row, family,
qualifier, compareOp, comparator, (Put) w);
} else if (w instanceof Delete) {
processed = this.getCoprocessorHost().preCheckAndDeleteAfterRowLock(row, family,
qualifier, compareOp, comparator, (Delete) w);
}
if (processed != null) {
return processed;
}
}
List<Cell> result = get(get, false);
boolean valueIsNull = comparator.getValue() == null ||
comparator.getValue().length == 0;
boolean matches = false;
if (result.size() == 0 && valueIsNull) {
matches = true;
} else if (result.size() > 0 && result.get(0).getValueLength() == 0 &&
valueIsNull) {
matches = true;
} else if (result.size() == 1 && !valueIsNull) {
Cell kv = result.get(0);
int compareResult = comparator.compareTo(kv.getValueArray(),
kv.getValueOffset(), kv.getValueLength());
switch (compareOp) {
case LESS:
matches = compareResult < 0;
break;
case LESS_OR_EQUAL:
matches = compareResult <= 0;
break;
case EQUAL:
matches = compareResult == 0;
break;
case NOT_EQUAL:
matches = compareResult != 0;
break;
case GREATER_OR_EQUAL:
matches = compareResult >= 0;
break;
case GREATER:
matches = compareResult > 0;
break;
default:
throw new RuntimeException("Unknown Compare op " + compareOp.name());
}
}
//If matches put the new put or delete the new delete
if (matches) {
// All edits for the given row (across all column families) must
// happen atomically.
doBatchMutate(w);
this.checkAndMutateChecksPassed.increment();
return true;
}
this.checkAndMutateChecksFailed.increment();
return false;
} finally {
rowLock.release();
}
} finally {
closeRegionOperation();
}
}
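/*
 * Illustrative usage sketch (FAM/QUAL are hypothetical byte[] constants):
 * atomically apply a Put only if the current cell equals an expected value,
 * using CompareOp.EQUAL with a BinaryComparator:
 *
 *   byte[] row = Bytes.toBytes("row1");
 *   Put put = new Put(row).addColumn(FAM, QUAL, Bytes.toBytes("new"));
 *   boolean applied = region.checkAndMutate(row, FAM, QUAL, CompareOp.EQUAL,
 *       new BinaryComparator(Bytes.toBytes("expected")), put, true);
 *
 * A null or zero-length expected value matches a missing or empty cell, per
 * the valueIsNull handling above.
 */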
// TODO: gets/puts and deletes should be refactored a bit so that row lock
// acquisition happens beforehand and the lock is simply passed into these
// methods. In the case of checkAndMutate you could then just do lockRow,
// get, put, unlockRow or something similar.
@Override
public boolean checkAndRowMutate(byte [] row, byte [] family, byte [] qualifier,
CompareOp compareOp, ByteArrayComparable comparator, RowMutations rm,
boolean writeToWAL) throws IOException {
checkReadOnly();
// TODO: add a check for value length, or maybe even better, move this to the
// client if this becomes a global setting.
checkResources();
startRegionOperation();
try {
Get get = new Get(row);
checkFamily(family);
get.addColumn(family, qualifier);
// Lock row - note that doBatchMutate will relock this row if called
RowLock rowLock = getRowLock(get.getRow());
// wait for all previous transactions to complete (with lock held)
mvcc.waitForPreviousTransactionsComplete();
try {
List<Cell> result = get(get, false);
boolean valueIsNull = comparator.getValue() == null ||
comparator.getValue().length == 0;
boolean matches = false;
if (result.size() == 0 && valueIsNull) {
matches = true;
} else if (result.size() > 0 && result.get(0).getValueLength() == 0 &&
valueIsNull) {
matches = true;
} else if (result.size() == 1 && !valueIsNull) {
Cell kv = result.get(0);
int compareResult = comparator.compareTo(kv.getValueArray(),
kv.getValueOffset(), kv.getValueLength());
switch (compareOp) {
case LESS:
matches = compareResult < 0;
break;
case LESS_OR_EQUAL:
matches = compareResult <= 0;
break;
case EQUAL:
matches = compareResult == 0;
break;
case NOT_EQUAL:
matches = compareResult != 0;
break;
case GREATER_OR_EQUAL:
matches = compareResult >= 0;
break;
case GREATER:
matches = compareResult > 0;
break;
default:
throw new RuntimeException("Unknown Compare op " + compareOp.name());
}
}
//If matches put the new put or delete the new delete
if (matches) {
// All edits for the given row (across all column families) must
// happen atomically.
mutateRow(rm);
this.checkAndMutateChecksPassed.increment();
return true;
}
this.checkAndMutateChecksFailed.increment();
return false;
} finally {
rowLock.release();
}
} finally {
closeRegionOperation();
}
}
private void doBatchMutate(Mutation mutation) throws IOException {
// Currently this is only called for puts and deletes, so no nonces.
OperationStatus[] batchMutate = this.batchMutate(new Mutation[] { mutation });
if (batchMutate[0].getOperationStatusCode().equals(OperationStatusCode.SANITY_CHECK_FAILURE)) {
throw new FailedSanityCheckException(batchMutate[0].getExceptionMsg());
} else if (batchMutate[0].getOperationStatusCode().equals(OperationStatusCode.BAD_FAMILY)) {
throw new NoSuchColumnFamilyException(batchMutate[0].getExceptionMsg());
}
}
/**
* Complete taking the snapshot on the region. Writes the region info and adds references to the
* working snapshot directory.
*
* TODO for api consistency, consider adding another version with no {@link ForeignExceptionSnare}
* arg. (In the future other cancellable HRegion methods could eventually add a
* {@link ForeignExceptionSnare}, or we could do something fancier).
*
* @param desc snapshot description object
* @param exnSnare ForeignExceptionSnare that captures external exceptions in case we need to
* bail out. This is allowed to be null and will just be ignored in that case.
* @throws IOException if there is an external or internal error causing the snapshot to fail
*/
public void addRegionToSnapshot(SnapshotDescription desc,
ForeignExceptionSnare exnSnare) throws IOException {
Path rootDir = FSUtils.getRootDir(conf);
Path snapshotDir = SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir);
SnapshotManifest manifest = SnapshotManifest.create(conf, getFilesystem(),
snapshotDir, desc, exnSnare);
manifest.addRegion(this);
}
@Override
public void updateCellTimestamps(final Iterable<List<Cell>> cellItr, final byte[] now)
throws IOException {
for (List<Cell> cells: cellItr) {
if (cells == null) continue;
assert cells instanceof RandomAccess;
int listSize = cells.size();
for (int i = 0; i < listSize; i++) {
CellUtil.updateLatestStamp(cells.get(i), now, 0);
}
}
}
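/*
 * For example (sketch): a Put created as
 *
 *   new Put(row).addColumn(FAM, QUAL, VAL) // no explicit timestamp
 *
 * produces cells stamped HConstants.LATEST_TIMESTAMP, which this method
 * rewrites to the server-side "now" captured after the row locks were
 * acquired; cells carrying an explicit client timestamp are left unchanged.
 */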
/**
* Possibly rewrite incoming cell tags.
*/
void rewriteCellTags(Map<byte[], List<Cell>> familyMap, final Mutation m) {
// Check if we have any work to do and early out otherwise
// Update these checks as more logic is added here
if (m.getTTL() == Long.MAX_VALUE) {
return;
}
// From this point we know we have some work to do
for (Map.Entry<byte[], List<Cell>> e: familyMap.entrySet()) {
List<Cell> cells = e.getValue();
assert cells instanceof RandomAccess;
int listSize = cells.size();
for (int i = 0; i < listSize; i++) {
Cell cell = cells.get(i);
List<Tag> newTags = new ArrayList<Tag>();
Iterator<Tag> tagIterator = CellUtil.tagsIterator(cell.getTagsArray(),
cell.getTagsOffset(), cell.getTagsLength());
// Carry forward existing tags
while (tagIterator.hasNext()) {
// Add any filters or tag specific rewrites here
newTags.add(tagIterator.next());
}
// Cell TTL handling
// Check again if we need to add a cell TTL because early out logic
// above may change when there are more tag based features in core.
if (m.getTTL() != Long.MAX_VALUE) {
// Add a cell TTL tag
newTags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(m.getTTL())));
}
// Rewrite the cell with the updated set of tags
cells.set(i, new KeyValue(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),
cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),
cell.getTimestamp(), KeyValue.Type.codeToType(cell.getTypeByte()),
cell.getValueArray(), cell.getValueOffset(), cell.getValueLength(),
newTags));
}
}
}
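/*
 * For example (sketch, assuming the Mutation-level TTL attribute): a Put
 * carrying a TTL causes every cell written by it to be rewritten here with
 * a TTL_TAG_TYPE tag holding the TTL in milliseconds:
 *
 *   Put put = new Put(row).addColumn(FAM, QUAL, VAL);
 *   put.setTTL(60000L); // m.getTTL() != Long.MAX_VALUE, so tags are rewritten
 */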
/*
* Check if we have resources to support an update.
*
* We throw RegionTooBusyException if we are above the memstore limit
* and expect the client to retry using some kind of backoff.
*/
private void checkResources() throws RegionTooBusyException {
// If catalog region, do not impose resource constraints or block updates.
if (this.getRegionInfo().isMetaRegion()) return;
if (this.memstoreSize.get() > this.blockingMemStoreSize) {
blockedRequestsCount.increment();
requestFlush();
throw new RegionTooBusyException("Above memstore limit, " +
"regionName=" + (this.getRegionInfo() == null ? "unknown" :
this.getRegionInfo().getRegionNameAsString()) +
", server=" + (this.getRegionServerServices() == null ? "unknown" :
this.getRegionServerServices().getServerName()) +
", memstoreSize=" + memstoreSize.get() +
", blockingMemStoreSize=" + blockingMemStoreSize);
}
}
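/*
 * A client-side sketch of the retry-with-backoff behavior this method
 * expects (hypothetical loop, not part of this class):
 *
 *   long pauseMs = 100;
 *   for (int attempt = 0; attempt < maxAttempts; attempt++) {
 *     try {
 *       region.batchMutate(mutations);
 *       break;
 *     } catch (RegionTooBusyException rtbe) {
 *       Thread.sleep(pauseMs); // back off while the region flushes
 *       pauseMs = Math.min(pauseMs * 2, 10000);
 *     }
 *   }
 */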
/**
* @throws IOException Throws exception if region is in read-only mode.
*/
protected void checkReadOnly() throws IOException {
if (isReadOnly()) {
throw new IOException("region is read only");
}
}
protected void checkReadsEnabled() throws IOException {
if (!this.writestate.readsEnabled) {
throw new IOException(getRegionInfo().getEncodedName()
+ ": The region's reads are disabled. Cannot serve the request");
}
}
public void setReadsEnabled(boolean readsEnabled) {
if (readsEnabled && !this.writestate.readsEnabled) {
LOG.info(getRegionInfo().getEncodedName() + " : Enabling reads for region.");
}
this.writestate.setReadsEnabled(readsEnabled);
}
/**
* Add updates first to the wal and then add values to memstore.
* Warning: Assumption is caller has lock on passed in row.
* @param edits Cell updates by column
* @throws IOException
*/
private void put(final byte [] row, byte [] family, List<Cell> edits)
throws IOException {
NavigableMap<byte[], List<Cell>> familyMap;
familyMap = new TreeMap<byte[], List<Cell>>(Bytes.BYTES_COMPARATOR);
familyMap.put(family, edits);
Put p = new Put(row);
p.setFamilyCellMap(familyMap);
doBatchMutate(p);
}
/**
* Atomically apply the given map of family->edits to the memstore.
* This handles the consistency control on its own, but the caller
* should already have locked updatesLock.readLock(). This also does
* <b>not</b> check the families for validity.
*
* @param familyMap Map of kvs per family
* @param mvccNum the MVCC write number (or replay sequence id when in replay)
* to stamp onto each added cell
* @param memstoreCells output list of the cells actually added to the
* memstore, used for rollback if the WAL sync later fails
* @param isInReplay true when adding replayed KVs into memstore
* @return the additional memory usage of the memstore caused by the
* new entries.
* @throws IOException
*/
private long applyFamilyMapToMemstore(Map<byte[], List<Cell>> familyMap,
long mvccNum, List<Cell> memstoreCells, boolean isInReplay) throws IOException {
long size = 0;
for (Map.Entry<byte[], List<Cell>> e : familyMap.entrySet()) {
byte[] family = e.getKey();
List<Cell> cells = e.getValue();
assert cells instanceof RandomAccess;
Store store = getStore(family);
int listSize = cells.size();
for (int i=0; i < listSize; i++) {
Cell cell = cells.get(i);
CellUtil.setSequenceId(cell, mvccNum);
Pair<Long, Cell> ret = store.add(cell);
size += ret.getFirst();
memstoreCells.add(ret.getSecond());
if(isInReplay) {
// set memstore newly added cells with replay mvcc number
CellUtil.setSequenceId(ret.getSecond(), mvccNum);
}
}
}
return size;
}
/**
* Remove all the keys listed in the map from the memstore. This method is
* called when a Put/Delete has updated memstore but subsequently fails to update
* the wal. This method is then invoked to rollback the memstore.
*/
private void rollbackMemstore(List<Cell> memstoreCells) {
int kvsRolledback = 0;
for (Cell cell : memstoreCells) {
byte[] family = CellUtil.cloneFamily(cell);
Store store = getStore(family);
store.rollback(cell);
kvsRolledback++;
}
LOG.debug("rollbackMemstore rolled back " + kvsRolledback);
}
@Override
public void checkFamilies(Collection<byte[]> families) throws NoSuchColumnFamilyException {
for (byte[] family : families) {
checkFamily(family);
}
}
/**
* During replay, there could exist column families which were removed between the region
* server failure and the replay.
*/
private void removeNonExistentColumnFamilyForReplay(
final Map<byte[], List<Cell>> familyMap) {
List<byte[]> nonExistentList = null;
for (byte[] family : familyMap.keySet()) {
if (!this.htableDescriptor.hasFamily(family)) {
if (nonExistentList == null) {
nonExistentList = new ArrayList<byte[]>();
}
nonExistentList.add(family);
}
}
if (nonExistentList != null) {
for (byte[] family : nonExistentList) {
// Perhaps schema was changed between crash and replay
LOG.info("No family for " + Bytes.toString(family) + " omit from reply.");
familyMap.remove(family);
}
}
}
@Override
public void checkTimestamps(final Map<byte[], List<Cell>> familyMap, long now)
throws FailedSanityCheckException {
if (timestampSlop == HConstants.LATEST_TIMESTAMP) {
return;
}
long maxTs = now + timestampSlop;
for (List<Cell> kvs : familyMap.values()) {
assert kvs instanceof RandomAccess;
int listSize = kvs.size();
for (int i=0; i < listSize; i++) {
Cell cell = kvs.get(i);
// see if the user-side TS is out of range. latest = server-side
long ts = cell.getTimestamp();
if (ts != HConstants.LATEST_TIMESTAMP && ts > maxTs) {
throw new FailedSanityCheckException("Timestamp for KV out of range "
+ cell + " (too.new=" + timestampSlop + ")");
}
}
}
}
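/*
 * For example (sketch, hypothetical slop value): with timestampSlop = 2000,
 *
 *   put.addColumn(FAM, QUAL, now + 5000, VAL) // 5 seconds in the future
 *
 * fails the sanity check above, while cells stamped
 * HConstants.LATEST_TIMESTAMP always pass.
 */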
/**
* Append the given map of family->edits to a WALEdit data structure.
* This does not write to the WAL itself.
* @param familyMap map of family->edits
* @param walEdit the destination entry to append into
*/
private void addFamilyMapToWALEdit(Map<byte[], List<Cell>> familyMap,
WALEdit walEdit) {
for (List<Cell> edits : familyMap.values()) {
assert edits instanceof RandomAccess;
int listSize = edits.size();
for (int i=0; i < listSize; i++) {
Cell cell = edits.get(i);
walEdit.add(cell);
}
}
}
private void requestFlush() {
if (this.rsServices == null) {
return;
}
synchronized (writestate) {
if (this.writestate.isFlushRequested()) {
return;
}
writestate.flushRequested = true;
}
// Make request outside of synchronize block; HBASE-818.
this.rsServices.getFlushRequester().requestFlush(this, false);
if (LOG.isDebugEnabled()) {
LOG.debug("Flush requested on " + this);
}
}
/*
* @param size the current memstore size
* @return True if size is over the flush threshold
*/
private boolean isFlushSize(final long size) {
return size > this.memstoreFlushSize;
}
/**
* Read the edits put under this region by wal splitting process. Put
* the recovered edits back up into this region.
*
* <p>We can ignore any wal message that has a sequence ID that's equal to or
* lower than minSeqId. (Because we know such messages are already
* reflected in the HFiles.)
*
* <p>While this is running we are putting pressure on memory yet we are
* outside of our usual accounting because we are not yet an onlined region
* (this stuff is being run as part of Region initialization). This means
* that if we're up against global memory limits, we'll not be flagged to flush
* because we are not online. We can't be flushed by the usual mechanisms anyway;
* we're not yet online so our relative sequenceids are not yet aligned with
* WAL sequenceids -- not until we come up online, post processing of split
* edits.
*
* <p>But to help relieve memory pressure, we at least manage our own heap size
* by flushing if we are in excess of per-region limits. Flushing, though, we have
* to be careful to avoid using the regionserver/wal sequenceid. It is running
* on a different line from what is going on in here in this region context, so if we
* crashed replaying these edits, but in the midst had a flush that used the
* regionserver wal with a sequenceid in excess of what is going on in here
* in this region and its split editlogs, then we could miss edits the
* next time we go to recover. So, we have to flush inline, using seqids that
* make sense in this single region context only -- until we come online.
*
* @param maxSeqIdInStores Any edit found in split editlogs needs to be in excess of
* the maxSeqId for the store to be applied, else its skipped.
* @return the sequence id of the last edit added to this region out of the
* recovered edits log or <code>minSeqId</code> if nothing added from editlogs.
* @throws UnsupportedEncodingException
* @throws IOException
*/
protected long replayRecoveredEditsIfAny(final Path regiondir,
Map<byte[], Long> maxSeqIdInStores,
final CancelableProgressable reporter, final MonitoredTask status)
throws IOException {
long minSeqIdForTheRegion = -1;
for (Long maxSeqIdInStore : maxSeqIdInStores.values()) {
if (maxSeqIdInStore < minSeqIdForTheRegion || minSeqIdForTheRegion == -1) {
minSeqIdForTheRegion = maxSeqIdInStore;
}
}
long seqid = minSeqIdForTheRegion;
FileSystem fs = this.fs.getFileSystem();
NavigableSet<Path> files = WALSplitter.getSplitEditFilesSorted(fs, regiondir);
if (LOG.isDebugEnabled()) {
LOG.debug("Found " + (files == null ? 0 : files.size())
+ " recovered edits file(s) under " + regiondir);
}
if (files == null || files.isEmpty()) return seqid;
for (Path edits: files) {
if (edits == null || !fs.exists(edits)) {
LOG.warn("Null or non-existent edits file: " + edits);
continue;
}
if (isZeroLengthThenDelete(fs, edits)) continue;
long maxSeqId;
String fileName = edits.getName();
maxSeqId = Math.abs(Long.parseLong(fileName));
if (maxSeqId <= minSeqIdForTheRegion) {
if (LOG.isDebugEnabled()) {
String msg = "Maximum sequenceid for this wal is " + maxSeqId
+ " and minimum sequenceid for the region is " + minSeqIdForTheRegion
+ ", skipped the whole file, path=" + edits;
LOG.debug(msg);
}
continue;
}
try {
// replay the edits. Replay can return -1 if everything is skipped, only update
// if seqId is greater
seqid = Math.max(seqid, replayRecoveredEdits(edits, maxSeqIdInStores, reporter));
} catch (IOException e) {
boolean skipErrors = conf.getBoolean(
HConstants.HREGION_EDITS_REPLAY_SKIP_ERRORS,
conf.getBoolean(
"hbase.skip.errors",
HConstants.DEFAULT_HREGION_EDITS_REPLAY_SKIP_ERRORS));
if (conf.get("hbase.skip.errors") != null) {
LOG.warn(
"The property 'hbase.skip.errors' has been deprecated. Please use " +
HConstants.HREGION_EDITS_REPLAY_SKIP_ERRORS + " instead.");
}
if (skipErrors) {
Path p = WALSplitter.moveAsideBadEditsFile(fs, edits);
LOG.error(HConstants.HREGION_EDITS_REPLAY_SKIP_ERRORS
+ "=true so continuing. Renamed " + edits +
" as " + p, e);
} else {
throw e;
}
}
}
// The edits size added into rsAccounting during this replaying will not
// be required any more. So just clear it.
if (this.rsAccounting != null) {
this.rsAccounting.clearRegionReplayEditsSize(getRegionInfo().getRegionName());
}
if (seqid > minSeqIdForTheRegion) {
// Then we added some edits to memory. Flush and cleanup split edit files.
internalFlushcache(null, seqid, stores.values(), status, false);
}
// Now delete the content of recovered edits. We're done w/ them.
if (files.size() > 0 && this.conf.getBoolean("hbase.region.archive.recovered.edits", false)) {
// For debugging data loss issues!
// If this flag is set, make use of the hfile archiving by making recovered.edits a fake
// column family. Have to fake out file type too by casting our recovered.edits as storefiles
String fakeFamilyName = WALSplitter.getRegionDirRecoveredEditsDir(regiondir).getName();
Set<StoreFile> fakeStoreFiles = new HashSet<StoreFile>(files.size());
for (Path file: files) {
fakeStoreFiles.add(new StoreFile(getRegionFileSystem().getFileSystem(), file, this.conf,
null, null));
}
getRegionFileSystem().removeStoreFiles(fakeFamilyName, fakeStoreFiles);
} else {
for (Path file: files) {
if (!fs.delete(file, false)) {
LOG.error("Failed delete of " + file);
} else {
LOG.debug("Deleted recovered.edits file=" + file);
}
}
}
return seqid;
}
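/*
 * Illustrative layout (hypothetical path): recovered edits files are named
 * for the highest sequence id they may contain, e.g.
 *
 *   .../<encoded-region-name>/recovered.edits/0000000000000001234
 *
 * so Math.abs(Long.parseLong(fileName)) above yields 1234, and the whole
 * file is skipped when 1234 <= minSeqIdForTheRegion.
 */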
/*
* @param edits File of recovered edits.
* @param maxSeqIdInStores Maximum sequenceid found in each store. Edits in wal
* must be larger than this to be replayed for each store.
* @param reporter progress reporter; may be null
* @return the sequence id of the last edit added to this region out of the
* recovered edits log or <code>minSeqId</code> if nothing added from editlogs.
* @throws IOException
*/
private long replayRecoveredEdits(final Path edits,
Map<byte[], Long> maxSeqIdInStores, final CancelableProgressable reporter)
throws IOException {
String msg = "Replaying edits from " + edits;
LOG.info(msg);
MonitoredTask status = TaskMonitor.get().createStatus(msg);
FileSystem fs = this.fs.getFileSystem();
status.setStatus("Opening recovered edits");
WAL.Reader reader = null;
try {
reader = WALFactory.createReader(fs, edits, conf);
long currentEditSeqId = -1;
long currentReplaySeqId = -1;
long firstSeqIdInLog = -1;
long skippedEdits = 0;
long editsCount = 0;
long intervalEdits = 0;
WAL.Entry entry;
Store store = null;
boolean reported_once = false;
ServerNonceManager ng = this.rsServices == null ? null : this.rsServices.getNonceManager();
try {
// How many edits seen before we check elapsed time
int interval = this.conf.getInt("hbase.hstore.report.interval.edits", 2000);
// How often to send a progress report (default 1/2 master timeout)
int period = this.conf.getInt("hbase.hstore.report.period", 300000);
long lastReport = EnvironmentEdgeManager.currentTime();
while ((entry = reader.next()) != null) {
WALKey key = entry.getKey();
WALEdit val = entry.getEdit();
if (ng != null) { // null during some tests, or when nonces are disabled
ng.reportOperationFromWal(key.getNonceGroup(), key.getNonce(), key.getWriteTime());
}
if (reporter != null) {
intervalEdits += val.size();
if (intervalEdits >= interval) {
// Number of edits interval reached
intervalEdits = 0;
long cur = EnvironmentEdgeManager.currentTime();
if (lastReport + period <= cur) {
status.setStatus("Replaying edits..." +
" skipped=" + skippedEdits +
" edits=" + editsCount);
// Timeout reached
if(!reporter.progress()) {
msg = "Progressable reporter failed, stopping replay";
LOG.warn(msg);
status.abort(msg);
throw new IOException(msg);
}
reported_once = true;
lastReport = cur;
}
}
}
if (firstSeqIdInLog == -1) {
firstSeqIdInLog = key.getLogSeqNum();
}
if (currentEditSeqId > key.getLogSeqNum()) {
// when this condition is true, it means we have a serious defect because we need to
// maintain increasing SeqId for WAL edits per region
LOG.error(getRegionInfo().getEncodedName() + " : "
+ "Found decreasing SeqId. PreId=" + currentEditSeqId + " key=" + key
+ "; edit=" + val);
} else {
currentEditSeqId = key.getLogSeqNum();
}
currentReplaySeqId = (key.getOrigLogSeqNum() > 0) ?
key.getOrigLogSeqNum() : currentEditSeqId;
// Start coprocessor replay here. The coprocessor is for each WALEdit
// instead of a KeyValue.
if (coprocessorHost != null) {
status.setStatus("Running pre-WAL-restore hook in coprocessors");
if (coprocessorHost.preWALRestore(this.getRegionInfo(), key, val)) {
// if bypass this wal entry, ignore it ...
continue;
}
}
// Check this edit is for this region.
if (!Bytes.equals(key.getEncodedRegionName(),
this.getRegionInfo().getEncodedNameAsBytes())) {
skippedEdits++;
continue;
}
boolean flush = false;
for (Cell cell: val.getCells()) {
// Check this edit is for me. Also, guard against writing the special
// METACOLUMN info such as HBASE::CACHEFLUSH entries
if (CellUtil.matchingFamily(cell, WALEdit.METAFAMILY)) {
//this is a special edit, we should handle it
CompactionDescriptor compaction = WALEdit.getCompaction(cell);
if (compaction != null) {
//replay the compaction
replayWALCompactionMarker(compaction, false, true, Long.MAX_VALUE);
}
skippedEdits++;
continue;
}
// Figure which store the edit is meant for.
if (store == null || !CellUtil.matchingFamily(cell, store.getFamily().getName())) {
store = getStore(cell);
}
if (store == null) {
// This should never happen. Perhaps schema was changed between
// crash and redeploy?
LOG.warn("No family for " + cell);
skippedEdits++;
continue;
}
// Now, figure if we should skip this edit.
if (key.getLogSeqNum() <= maxSeqIdInStores.get(store.getFamily()
.getName())) {
skippedEdits++;
continue;
}
CellUtil.setSequenceId(cell, currentReplaySeqId);
// Once we are over the limit, restoreEdit will keep returning true to
// flush -- but don't flush until we've played all the kvs that make up
// the WALEdit.
flush |= restoreEdit(store, cell);
editsCount++;
}
if (flush) {
internalFlushcache(null, currentEditSeqId, stores.values(), status, false);
}
if (coprocessorHost != null) {
coprocessorHost.postWALRestore(this.getRegionInfo(), key, val);
}
}
} catch (EOFException eof) {
Path p = WALSplitter.moveAsideBadEditsFile(fs, edits);
msg = "Encountered EOF. Most likely due to Master failure during " +
"wal splitting, so we have this data in another edit. " +
"Continuing, but renaming " + edits + " as " + p;
LOG.warn(msg, eof);
status.abort(msg);
} catch (IOException ioe) {
// If the IOE resulted from bad file format,
// then this problem is idempotent and retrying won't help
if (ioe.getCause() instanceof ParseException) {
Path p = WALSplitter.moveAsideBadEditsFile(fs, edits);
msg = "File corruption encountered! " +
"Continuing, but renaming " + edits + " as " + p;
LOG.warn(msg, ioe);
status.setStatus(msg);
} else {
status.abort(StringUtils.stringifyException(ioe));
// other IO errors may be transient (bad network connection,
// checksum exception on one datanode, etc). throw & retry
throw ioe;
}
}
if (reporter != null && !reported_once) {
reporter.progress();
}
msg = "Applied " + editsCount + ", skipped " + skippedEdits +
", firstSequenceIdInLog=" + firstSeqIdInLog +
", maxSequenceIdInLog=" + currentEditSeqId + ", path=" + edits;
status.markComplete(msg);
LOG.debug(msg);
return currentEditSeqId;
} finally {
status.cleanup();
if (reader != null) {
reader.close();
}
}
}
/**
* Call to complete a compaction. It's for the case where we find in the WAL a compaction
* that was not finished. We could find one recovering a WAL after a regionserver crash.
* See HBASE-2331.
*/
void replayWALCompactionMarker(CompactionDescriptor compaction, boolean pickCompactionFiles,
boolean removeFiles, long replaySeqId)
throws IOException {
checkTargetRegion(compaction.getEncodedRegionName().toByteArray(),
"Compaction marker from WAL ", compaction);
synchronized (writestate) {
if (replaySeqId < lastReplayedOpenRegionSeqId) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Skipping replaying compaction event :" + TextFormat.shortDebugString(compaction)
+ " because its sequence id " + replaySeqId + " is smaller than this regions "
+ "lastReplayedOpenRegionSeqId of " + lastReplayedOpenRegionSeqId);
return;
}
if (replaySeqId < lastReplayedCompactionSeqId) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Skipping replaying compaction event :" + TextFormat.shortDebugString(compaction)
+ " because its sequence id " + replaySeqId + " is smaller than this regions "
+ "lastReplayedCompactionSeqId of " + lastReplayedCompactionSeqId);
return;
} else {
lastReplayedCompactionSeqId = replaySeqId;
}
if (LOG.isDebugEnabled()) {
LOG.debug(getRegionInfo().getEncodedName() + " : "
+ "Replaying compaction marker " + TextFormat.shortDebugString(compaction)
+ " with seqId=" + replaySeqId + " and lastReplayedOpenRegionSeqId="
+ lastReplayedOpenRegionSeqId);
}
startRegionOperation(Operation.REPLAY_EVENT);
try {
Store store = this.getStore(compaction.getFamilyName().toByteArray());
if (store == null) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Found Compaction WAL edit for deleted family:"
+ Bytes.toString(compaction.getFamilyName().toByteArray()));
return;
}
store.replayCompactionMarker(compaction, pickCompactionFiles, removeFiles);
logRegionFiles();
} catch (FileNotFoundException ex) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "At least one of the store files in compaction: "
+ TextFormat.shortDebugString(compaction)
+ " doesn't exist any more. Skip loading the file(s)", ex);
} finally {
closeRegionOperation(Operation.REPLAY_EVENT);
}
}
}
void replayWALFlushMarker(FlushDescriptor flush, long replaySeqId) throws IOException {
checkTargetRegion(flush.getEncodedRegionName().toByteArray(),
"Flush marker from WAL ", flush);
if (ServerRegionReplicaUtil.isDefaultReplica(this.getRegionInfo())) {
return; // if primary nothing to do
}
if (LOG.isDebugEnabled()) {
LOG.debug(getRegionInfo().getEncodedName() + " : "
+ "Replaying flush marker " + TextFormat.shortDebugString(flush));
}
startRegionOperation(Operation.REPLAY_EVENT); // use region close lock to guard against close
try {
FlushAction action = flush.getAction();
switch (action) {
case START_FLUSH:
replayWALFlushStartMarker(flush);
break;
case COMMIT_FLUSH:
replayWALFlushCommitMarker(flush);
break;
case ABORT_FLUSH:
replayWALFlushAbortMarker(flush);
break;
case CANNOT_FLUSH:
replayWALFlushCannotFlushMarker(flush, replaySeqId);
break;
default:
LOG.warn(getRegionInfo().getEncodedName() + " : " +
"Received a flush event with unknown action, ignoring. " +
TextFormat.shortDebugString(flush));
break;
}
logRegionFiles();
} finally {
closeRegionOperation(Operation.REPLAY_EVENT);
}
}
/** Replay the flush marker from primary region by creating a corresponding snapshot of
* the store memstores, only if the memstores do not have a higher seqId from an earlier wal
* edit (because the events may be coming out of order).
*/
@VisibleForTesting
PrepareFlushResult replayWALFlushStartMarker(FlushDescriptor flush) throws IOException {
long flushSeqId = flush.getFlushSequenceNumber();
HashSet<Store> storesToFlush = new HashSet<Store>();
for (StoreFlushDescriptor storeFlush : flush.getStoreFlushesList()) {
byte[] family = storeFlush.getFamilyName().toByteArray();
Store store = getStore(family);
if (store == null) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Received a flush start marker from primary, but the family is not found. Ignoring"
+ " StoreFlushDescriptor:" + TextFormat.shortDebugString(storeFlush));
continue;
}
storesToFlush.add(store);
}
MonitoredTask status = TaskMonitor.get().createStatus("Preparing flush " + this);
// we will use writestate as a coarse-grain lock for all the replay events
// (flush, compaction, region open etc)
synchronized (writestate) {
try {
if (flush.getFlushSequenceNumber() < lastReplayedOpenRegionSeqId) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Skipping replaying flush event :" + TextFormat.shortDebugString(flush)
+ " because its sequence id is smaller than this regions lastReplayedOpenRegionSeqId "
+ " of " + lastReplayedOpenRegionSeqId);
return null;
}
if (numMutationsWithoutWAL.get() > 0) {
numMutationsWithoutWAL.set(0);
dataInMemoryWithoutWAL.set(0);
}
if (!writestate.flushing) {
// we do not have an active snapshot or a corresponding this.prepareFlushResult. This means
// we can just snapshot our memstores and continue as normal.
// invoke prepareFlushCache. Send null as wal since we do not want the flush events in wal
PrepareFlushResult prepareResult = internalPrepareFlushCache(null,
flushSeqId, storesToFlush, status, false);
if (prepareResult.result == null) {
// save the PrepareFlushResult so that we can use it later from commit flush
this.writestate.flushing = true;
this.prepareFlushResult = prepareResult;
status.markComplete("Flush prepare successful");
if (LOG.isDebugEnabled()) {
LOG.debug(getRegionInfo().getEncodedName() + " : "
+ " Prepared flush with seqId:" + flush.getFlushSequenceNumber());
}
} else {
// special case empty memstore. We will still save the flush result in this case, since
// our memstore is empty, but the primary is still flushing
if (prepareResult.getResult().getResult() ==
FlushResult.Result.CANNOT_FLUSH_MEMSTORE_EMPTY) {
this.writestate.flushing = true;
this.prepareFlushResult = prepareResult;
if (LOG.isDebugEnabled()) {
LOG.debug(getRegionInfo().getEncodedName() + " : "
+ " Prepared empty flush with seqId:" + flush.getFlushSequenceNumber());
}
}
status.abort("Flush prepare failed with " + prepareResult.result);
// nothing much to do; prepare flush failed for some reason.
}
return prepareResult;
} else {
// we already have an active snapshot.
if (flush.getFlushSequenceNumber() == this.prepareFlushResult.flushOpSeqId) {
// They define the same flush. Log and continue.
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Received a flush prepare marker with the same seqId: " +
+ flush.getFlushSequenceNumber() + " before clearing the previous one with seqId: "
+ prepareFlushResult.flushOpSeqId + ". Ignoring");
// ignore
} else if (flush.getFlushSequenceNumber() < this.prepareFlushResult.flushOpSeqId) {
// We received a flush with a smaller seqNum than what we have prepared. We can only
// ignore this prepare flush request.
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Received a flush prepare marker with a smaller seqId: " +
+ flush.getFlushSequenceNumber() + " before clearing the previous one with seqId: "
+ prepareFlushResult.flushOpSeqId + ". Ignoring");
// ignore
} else {
// We received a flush with a larger seqNum than what we have prepared
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Received a flush prepare marker with a larger seqId: " +
+ flush.getFlushSequenceNumber() + " before clearing the previous one with seqId: "
+ prepareFlushResult.flushOpSeqId + ". Ignoring");
// We do not have multiple active snapshots in the memstore or a way to merge current
// memstore snapshot with the contents and resnapshot for now. We cannot take
// another snapshot and drop the previous one because that will cause temporary
// data loss in the secondary. So we ignore this for now, deferring the resolution
// to happen when we see the corresponding flush commit marker. If we have a memstore
// snapshot with x, and later receive another prepare snapshot with y (where x < y),
// when we see flush commit for y, we will drop snapshot for x, and can also drop all
// the memstore edits if everything in memstore is < y. This is the usual case for
// RS crash + recovery where we might see consecutive prepare flush wal markers.
// Otherwise, this will cause more memory to be used in the secondary replica until a
// further prepare + commit flush is seen and replayed.
}
}
} finally {
status.cleanup();
writestate.notifyAll();
}
}
return null;
}
@VisibleForTesting
void replayWALFlushCommitMarker(FlushDescriptor flush) throws IOException {
MonitoredTask status = TaskMonitor.get().createStatus("Committing flush " + this);
// check whether we have the memstore snapshot with the corresponding seqId. Replays to
// secondary region replicas are in order, except for when the region moves or the
// region server crashes. In those cases, we may receive replay requests out of order from
// the original seqIds.
synchronized (writestate) {
try {
if (flush.getFlushSequenceNumber() < lastReplayedOpenRegionSeqId) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Skipping replaying flush event :" + TextFormat.shortDebugString(flush)
+ " because its sequence id is smaller than this regions lastReplayedOpenRegionSeqId "
+ " of " + lastReplayedOpenRegionSeqId);
return;
}
if (writestate.flushing) {
PrepareFlushResult prepareFlushResult = this.prepareFlushResult;
if (flush.getFlushSequenceNumber() == prepareFlushResult.flushOpSeqId) {
if (LOG.isDebugEnabled()) {
LOG.debug(getRegionInfo().getEncodedName() + " : "
+ "Received a flush commit marker with seqId:" + flush.getFlushSequenceNumber()
+ " and a previous prepared snapshot was found");
}
// This is the regular case where we received commit flush after prepare flush
// corresponding to the same seqId.
replayFlushInStores(flush, prepareFlushResult, true);
// Set down the memstore size by amount of flush.
this.addAndGetGlobalMemstoreSize(-prepareFlushResult.totalFlushableSize);
this.prepareFlushResult = null;
writestate.flushing = false;
} else if (flush.getFlushSequenceNumber() < prepareFlushResult.flushOpSeqId) {
// This should not happen normally. However, lets be safe and guard against these cases
// we received a flush commit with a smaller seqId than what we have prepared
// we will pick the flush file up from this commit (if we have not seen it), but we
// will not drop the memstore
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Received a flush commit marker with smaller seqId: "
+ flush.getFlushSequenceNumber() + " than what we have prepared with seqId: "
+ prepareFlushResult.flushOpSeqId + ". Picking up new file, but not dropping"
+" prepared memstore snapshot");
replayFlushInStores(flush, prepareFlushResult, false);
// snapshot is not dropped, so memstore sizes should not be decremented
// we still have the prepared snapshot, flushing should still be true
} else {
// This should not happen normally. However, lets be safe and guard against these cases
// we received a flush commit with a larger seqId than what we have prepared
// we will pick the flush file for this. We will also obtain the updates lock and
// look for contents of the memstore to see whether we have edits after this seqId.
// If not, we will drop all the memstore edits and the snapshot as well.
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Received a flush commit marker with larger seqId: "
+ flush.getFlushSequenceNumber() + " than what we have prepared with seqId: " +
prepareFlushResult.flushOpSeqId + ". Picking up new file and dropping prepared"
+" memstore snapshot");
replayFlushInStores(flush, prepareFlushResult, true);
// Set down the memstore size by amount of flush.
this.addAndGetGlobalMemstoreSize(-prepareFlushResult.totalFlushableSize);
// Inspect the memstore contents to see whether the memstore contains only edits
// with seqId smaller than the flush seqId. If so, we can discard those edits.
dropMemstoreContentsForSeqId(flush.getFlushSequenceNumber(), null);
this.prepareFlushResult = null;
writestate.flushing = false;
}
// If we were waiting for observing a flush or region opening event for not showing
// partial data after a secondary region crash, we can allow reads now. We can only make
// sure that we are not showing partial data (for example skipping some previous edits)
// until we observe a full flush start and flush commit. So if we were not able to find
// a previous flush we will not enable reads now.
this.setReadsEnabled(true);
} else {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Received a flush commit marker with seqId:" + flush.getFlushSequenceNumber()
+ ", but no previous prepared snapshot was found");
// There is no corresponding prepare snapshot from before.
// We will pick up the new flushed file
replayFlushInStores(flush, null, false);
// Inspect the memstore contents to see whether the memstore contains only edits
// with seqId smaller than the flush seqId. If so, we can discard those edits.
dropMemstoreContentsForSeqId(flush.getFlushSequenceNumber(), null);
}
status.markComplete("Flush commit successful");
// Update the last flushed sequence id for region.
this.maxFlushedSeqId = flush.getFlushSequenceNumber();
// advance the mvcc read point so that the new flushed file is visible.
// there may be some in-flight transactions, but they won't be made visible since they are
// either greater than flush seq number or they were already dropped via flush.
// TODO: If we are using FlushAllStoresPolicy, then this can make edits visible from other
// stores while they are still in flight because the flush commit marker will not contain
// flushes from ALL stores.
getMVCC().advanceMemstoreReadPointIfNeeded(flush.getFlushSequenceNumber());
} catch (FileNotFoundException ex) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "At least one of the store files in flush: " + TextFormat.shortDebugString(flush)
+ " doesn't exist any more. Skip loading the file(s)", ex);
}
finally {
status.cleanup();
writestate.notifyAll();
}
}
// C. Finally notify anyone waiting on memstore to clear:
// e.g. checkResources().
synchronized (this) {
notifyAll(); // FindBugs NN_NAKED_NOTIFY
}
}
/**
* Replays the given flush descriptor by opening the flush files in stores and dropping the
* memstore snapshots if requested.
* @param flush the flush descriptor replayed from the primary region
* @param prepareFlushResult the previously prepared local flush result, or null if none
* @param dropMemstoreSnapshot whether to drop the prepared memstore snapshot
* @throws IOException
*/
private void replayFlushInStores(FlushDescriptor flush, PrepareFlushResult prepareFlushResult,
boolean dropMemstoreSnapshot)
throws IOException {
for (StoreFlushDescriptor storeFlush : flush.getStoreFlushesList()) {
byte[] family = storeFlush.getFamilyName().toByteArray();
Store store = getStore(family);
if (store == null) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Received a flush commit marker from primary, but the family is not found."
+ "Ignoring StoreFlushDescriptor:" + storeFlush);
continue;
}
List<String> flushFiles = storeFlush.getFlushOutputList();
StoreFlushContext ctx = null;
long startTime = EnvironmentEdgeManager.currentTime();
if (prepareFlushResult == null || prepareFlushResult.storeFlushCtxs == null) {
ctx = store.createFlushContext(flush.getFlushSequenceNumber());
} else {
ctx = prepareFlushResult.storeFlushCtxs.get(family);
startTime = prepareFlushResult.startTime;
}
if (ctx == null) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Unexpected: flush commit marker received from store "
+ Bytes.toString(family) + " but no associated flush context. Ignoring");
continue;
}
ctx.replayFlush(flushFiles, dropMemstoreSnapshot); // replay the flush
// Record latest flush time
this.lastStoreFlushTimeMap.put(store, startTime);
}
}
  /**
   * Drops the memstore contents after replaying a flush descriptor or region open event replay
   * if the memstore edits have seqNums smaller than the given seq id
   * @param seqId the sequence id up to which memstore edits can be dropped
   * @param store the store whose memstore contents should be dropped, or null for all stores
   * @return total heap size freed from the affected memstore(s)
   * @throws IOException
   */
private long dropMemstoreContentsForSeqId(long seqId, Store store) throws IOException {
long totalFreedSize = 0;
this.updatesLock.writeLock().lock();
try {
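      // Wait out in-flight mvcc transactions so that the sequence id read below reflects
      // every edit that could still end up in the memstore.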
mvcc.waitForPreviousTransactionsComplete();
long currentSeqId = getSequenceId().get();
if (seqId >= currentSeqId) {
// then we can drop the memstore contents since everything is below this seqId
LOG.info(getRegionInfo().getEncodedName() + " : "
+ "Dropping memstore contents as well since replayed flush seqId: "
+ seqId + " is greater than current seqId:" + currentSeqId);
// Prepare flush (take a snapshot) and then abort (drop the snapshot)
        if (store == null) {
for (Store s : stores.values()) {
totalFreedSize += doDropStoreMemstoreContentsForSeqId(s, currentSeqId);
}
} else {
totalFreedSize += doDropStoreMemstoreContentsForSeqId(store, currentSeqId);
}
} else {
LOG.info(getRegionInfo().getEncodedName() + " : "
+ "Not dropping memstore contents since replayed flush seqId: "
+ seqId + " is smaller than current seqId:" + currentSeqId);
}
} finally {
this.updatesLock.writeLock().unlock();
}
return totalFreedSize;
}
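  /**
   * Frees the flushable contents of a single store by preparing a flush (taking a snapshot)
   * and immediately aborting it, decrementing the global memstore size accordingly.
   * @return the heap size that was freed
   */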
private long doDropStoreMemstoreContentsForSeqId(Store s, long currentSeqId) throws IOException {
long snapshotSize = s.getFlushableSize();
this.addAndGetGlobalMemstoreSize(-snapshotSize);
StoreFlushContext ctx = s.createFlushContext(currentSeqId);
ctx.prepare();
ctx.abort();
return snapshotSize;
}
private void replayWALFlushAbortMarker(FlushDescriptor flush) {
// nothing to do for now. A flush abort will cause a RS abort which means that the region
// will be opened somewhere else later. We will see the region open event soon, and replaying
// that will drop the snapshot
}
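  /**
   * Replays a CANNOT_FLUSH marker from the primary: the primary could not flush because its
   * memstore was empty, which means this secondary already has an up-to-date seqId and can
   * safely enable reads.
   */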
private void replayWALFlushCannotFlushMarker(FlushDescriptor flush, long replaySeqId) {
synchronized (writestate) {
if (this.lastReplayedOpenRegionSeqId > replaySeqId) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Skipping replaying flush event :" + TextFormat.shortDebugString(flush)
+ " because its sequence id " + replaySeqId + " is smaller than this regions "
+ "lastReplayedOpenRegionSeqId of " + lastReplayedOpenRegionSeqId);
return;
}
// If we were waiting for observing a flush or region opening event for not showing partial
// data after a secondary region crash, we can allow reads now. This event means that the
// primary was not able to flush because memstore is empty when we requested flush. By the
// time we observe this, we are guaranteed to have up to date seqId with our previous
// assignment.
this.setReadsEnabled(true);
}
}
@VisibleForTesting
PrepareFlushResult getPrepareFlushResult() {
return prepareFlushResult;
}
void replayWALRegionEventMarker(RegionEventDescriptor regionEvent) throws IOException {
checkTargetRegion(regionEvent.getEncodedRegionName().toByteArray(),
"RegionEvent marker from WAL ", regionEvent);
startRegionOperation(Operation.REPLAY_EVENT);
try {
if (ServerRegionReplicaUtil.isDefaultReplica(this.getRegionInfo())) {
return; // if primary nothing to do
}
if (regionEvent.getEventType() == EventType.REGION_CLOSE) {
// nothing to do on REGION_CLOSE for now.
return;
}
if (regionEvent.getEventType() != EventType.REGION_OPEN) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Unknown region event received, ignoring :"
+ TextFormat.shortDebugString(regionEvent));
return;
}
if (LOG.isDebugEnabled()) {
LOG.debug(getRegionInfo().getEncodedName() + " : "
+ "Replaying region open event marker " + TextFormat.shortDebugString(regionEvent));
}
// we will use writestate as a coarse-grain lock for all the replay events
synchronized (writestate) {
// Replication can deliver events out of order when primary region moves or the region
// server crashes, since there is no coordination between replication of different wal files
        // belonging to different region servers. We have to safeguard against this case by using
// region open event's seqid. Since this is the first event that the region puts (after
// possibly flushing recovered.edits), after seeing this event, we can ignore every edit
// smaller than this seqId
if (this.lastReplayedOpenRegionSeqId <= regionEvent.getLogSequenceNumber()) {
this.lastReplayedOpenRegionSeqId = regionEvent.getLogSequenceNumber();
} else {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Skipping replaying region event :" + TextFormat.shortDebugString(regionEvent)
+ " because its sequence id is smaller than this regions lastReplayedOpenRegionSeqId "
+ " of " + lastReplayedOpenRegionSeqId);
return;
}
        // The region open event lists all the files that the region has at the time of the
        // opening. Just pick all the files and drop prepared flushes and empty memstores
for (StoreDescriptor storeDescriptor : regionEvent.getStoresList()) {
// stores of primary may be different now
byte[] family = storeDescriptor.getFamilyName().toByteArray();
Store store = getStore(family);
if (store == null) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Received a region open marker from primary, but the family is not found. "
+ "Ignoring. StoreDescriptor:" + storeDescriptor);
continue;
}
long storeSeqId = store.getMaxSequenceId();
List<String> storeFiles = storeDescriptor.getStoreFileList();
try {
store.refreshStoreFiles(storeFiles); // replace the files with the new ones
} catch (FileNotFoundException ex) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "At least one of the store files: " + storeFiles
+ " doesn't exist any more. Skip loading the file(s)", ex);
continue;
}
if (store.getMaxSequenceId() != storeSeqId) {
// Record latest flush time if we picked up new files
lastStoreFlushTimeMap.put(store, EnvironmentEdgeManager.currentTime());
}
if (writestate.flushing) {
// only drop memstore snapshots if they are smaller than last flush for the store
if (this.prepareFlushResult.flushOpSeqId <= regionEvent.getLogSequenceNumber()) {
StoreFlushContext ctx = this.prepareFlushResult.storeFlushCtxs == null ?
null : this.prepareFlushResult.storeFlushCtxs.get(family);
if (ctx != null) {
long snapshotSize = store.getFlushableSize();
ctx.abort();
this.addAndGetGlobalMemstoreSize(-snapshotSize);
this.prepareFlushResult.storeFlushCtxs.remove(family);
}
}
}
// Drop the memstore contents if they are now smaller than the latest seen flushed file
dropMemstoreContentsForSeqId(regionEvent.getLogSequenceNumber(), store);
if (storeSeqId > this.maxFlushedSeqId) {
this.maxFlushedSeqId = storeSeqId;
}
}
// if all stores ended up dropping their snapshots, we can safely drop the
// prepareFlushResult
dropPrepareFlushIfPossible();
// advance the mvcc read point so that the new flushed file is visible.
// there may be some in-flight transactions, but they won't be made visible since they are
// either greater than flush seq number or they were already dropped via flush.
getMVCC().advanceMemstoreReadPointIfNeeded(this.maxFlushedSeqId);
// If we were waiting for observing a flush or region opening event for not showing partial
// data after a secondary region crash, we can allow reads now.
this.setReadsEnabled(true);
// C. Finally notify anyone waiting on memstore to clear:
// e.g. checkResources().
synchronized (this) {
notifyAll(); // FindBugs NN_NAKED_NOTIFY
}
}
logRegionFiles();
} finally {
closeRegionOperation(Operation.REPLAY_EVENT);
}
}
void replayWALBulkLoadEventMarker(WALProtos.BulkLoadDescriptor bulkLoadEvent) throws IOException {
checkTargetRegion(bulkLoadEvent.getEncodedRegionName().toByteArray(),
"BulkLoad marker from WAL ", bulkLoadEvent);
if (ServerRegionReplicaUtil.isDefaultReplica(this.getRegionInfo())) {
return; // if primary nothing to do
}
if (LOG.isDebugEnabled()) {
LOG.debug(getRegionInfo().getEncodedName() + " : "
+ "Replaying bulkload event marker " + TextFormat.shortDebugString(bulkLoadEvent));
}
// check if multiple families involved
boolean multipleFamilies = false;
byte[] family = null;
for (StoreDescriptor storeDescriptor : bulkLoadEvent.getStoresList()) {
byte[] fam = storeDescriptor.getFamilyName().toByteArray();
if (family == null) {
family = fam;
} else if (!Bytes.equals(family, fam)) {
multipleFamilies = true;
break;
}
}
startBulkRegionOperation(multipleFamilies);
try {
// we will use writestate as a coarse-grain lock for all the replay events
synchronized (writestate) {
// Replication can deliver events out of order when primary region moves or the region
// server crashes, since there is no coordination between replication of different wal files
        // belonging to different region servers. We have to safeguard against this case by using
// region open event's seqid. Since this is the first event that the region puts (after
// possibly flushing recovered.edits), after seeing this event, we can ignore every edit
// smaller than this seqId
if (bulkLoadEvent.getBulkloadSeqNum() >= 0
&& this.lastReplayedOpenRegionSeqId >= bulkLoadEvent.getBulkloadSeqNum()) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Skipping replaying bulkload event :"
+ TextFormat.shortDebugString(bulkLoadEvent)
+ " because its sequence id is smaller than this region's lastReplayedOpenRegionSeqId"
+ " =" + lastReplayedOpenRegionSeqId);
return;
}
for (StoreDescriptor storeDescriptor : bulkLoadEvent.getStoresList()) {
// stores of primary may be different now
family = storeDescriptor.getFamilyName().toByteArray();
Store store = getStore(family);
if (store == null) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Received a bulk load marker from primary, but the family is not found. "
+ "Ignoring. StoreDescriptor:" + storeDescriptor);
continue;
}
List<String> storeFiles = storeDescriptor.getStoreFileList();
for (String storeFile : storeFiles) {
StoreFileInfo storeFileInfo = null;
try {
storeFileInfo = fs.getStoreFileInfo(Bytes.toString(family), storeFile);
store.bulkLoadHFile(storeFileInfo);
} catch(FileNotFoundException ex) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ ((storeFileInfo != null) ? storeFileInfo.toString() :
(new Path(Bytes.toString(family), storeFile)).toString())
+ " doesn't exist any more. Skip loading the file");
}
}
}
}
if (bulkLoadEvent.getBulkloadSeqNum() > 0) {
getMVCC().advanceMemstoreReadPointIfNeeded(bulkLoadEvent.getBulkloadSeqNum());
}
} finally {
closeBulkRegionOperation();
}
}
/**
* If all stores ended up dropping their snapshots, we can safely drop the prepareFlushResult
*/
private void dropPrepareFlushIfPossible() {
if (writestate.flushing) {
boolean canDrop = true;
if (prepareFlushResult.storeFlushCtxs != null) {
for (Entry<byte[], StoreFlushContext> entry
: prepareFlushResult.storeFlushCtxs.entrySet()) {
Store store = getStore(entry.getKey());
if (store == null) {
continue;
}
if (store.getSnapshotSize() > 0) {
canDrop = false;
break;
}
}
}
      // this means that all the stores in the region have finished flushing, but the WAL marker
      // may not have been written or we did not receive it yet.
if (canDrop) {
writestate.flushing = false;
this.prepareFlushResult = null;
}
}
}
@Override
public boolean refreshStoreFiles() throws IOException {
if (ServerRegionReplicaUtil.isDefaultReplica(this.getRegionInfo())) {
return false; // if primary nothing to do
}
if (LOG.isDebugEnabled()) {
LOG.debug(getRegionInfo().getEncodedName() + " : "
+ "Refreshing store files to see whether we can free up memstore");
}
long totalFreedSize = 0;
long smallestSeqIdInStores = Long.MAX_VALUE;
startRegionOperation(); // obtain region close lock
try {
synchronized (writestate) {
for (Store store : getStores()) {
          // TODO: some stores might see new data from flush, while others do not, which
          // MIGHT break atomic edits across column families.
long maxSeqIdBefore = store.getMaxSequenceId();
// refresh the store files. This is similar to observing a region open wal marker.
store.refreshStoreFiles();
long storeSeqId = store.getMaxSequenceId();
if (storeSeqId < smallestSeqIdInStores) {
smallestSeqIdInStores = storeSeqId;
}
// see whether we can drop the memstore or the snapshot
if (storeSeqId > maxSeqIdBefore) {
if (writestate.flushing) {
// only drop memstore snapshots if they are smaller than last flush for the store
if (this.prepareFlushResult.flushOpSeqId <= storeSeqId) {
StoreFlushContext ctx = this.prepareFlushResult.storeFlushCtxs == null ?
null : this.prepareFlushResult.storeFlushCtxs.get(store.getFamily().getName());
if (ctx != null) {
long snapshotSize = store.getFlushableSize();
ctx.abort();
this.addAndGetGlobalMemstoreSize(-snapshotSize);
this.prepareFlushResult.storeFlushCtxs.remove(store.getFamily().getName());
totalFreedSize += snapshotSize;
}
}
}
// Drop the memstore contents if they are now smaller than the latest seen flushed file
totalFreedSize += dropMemstoreContentsForSeqId(storeSeqId, store);
}
}
// if all stores ended up dropping their snapshots, we can safely drop the
// prepareFlushResult
dropPrepareFlushIfPossible();
// advance the mvcc read point so that the new flushed files are visible.
// there may be some in-flight transactions, but they won't be made visible since they are
// either greater than flush seq number or they were already picked up via flush.
for (Store s : getStores()) {
getMVCC().advanceMemstoreReadPointIfNeeded(s.getMaxMemstoreTS());
}
        // smallestSeqIdInStores is the seqId for which we have a corresponding hfile. We can
        // safely skip any edit that is to be replayed in the future and has a seqId smaller
        // than this. We are updating lastReplayedOpenRegionSeqId so that we can skip all edits
        // that we have already picked the flush files for
if (this.lastReplayedOpenRegionSeqId < smallestSeqIdInStores) {
this.lastReplayedOpenRegionSeqId = smallestSeqIdInStores;
}
}
// C. Finally notify anyone waiting on memstore to clear:
// e.g. checkResources().
synchronized (this) {
notifyAll(); // FindBugs NN_NAKED_NOTIFY
}
return totalFreedSize > 0;
} finally {
closeRegionOperation();
}
}
private void logRegionFiles() {
if (LOG.isTraceEnabled()) {
LOG.trace(getRegionInfo().getEncodedName() + " : Store files for region: ");
for (Store s : stores.values()) {
for (StoreFile sf : s.getStorefiles()) {
LOG.trace(getRegionInfo().getEncodedName() + " : " + sf);
}
}
}
}
  /** Checks whether the given regionName either equals our region's name, or names the
   * primary region of the range this secondary replica serves.
   */
private void checkTargetRegion(byte[] encodedRegionName, String exceptionMsg, Object payload)
throws WrongRegionException {
if (Bytes.equals(this.getRegionInfo().getEncodedNameAsBytes(), encodedRegionName)) {
return;
}
if (!RegionReplicaUtil.isDefaultReplica(this.getRegionInfo()) &&
Bytes.equals(encodedRegionName,
this.fs.getRegionInfoForFS().getEncodedNameAsBytes())) {
return;
}
throw new WrongRegionException(exceptionMsg + payload
+ " targetted for region " + Bytes.toStringBinary(encodedRegionName)
+ " does not match this region: " + this.getRegionInfo());
}
/**
* Used by tests
   * @param s Store to add edit to.
* @param cell Cell to add.
* @return True if we should flush.
*/
protected boolean restoreEdit(final Store s, final Cell cell) {
long kvSize = s.add(cell).getFirst();
if (this.rsAccounting != null) {
rsAccounting.addAndGetRegionReplayEditsSize(getRegionInfo().getRegionName(), kvSize);
}
return isFlushSize(this.addAndGetGlobalMemstoreSize(kvSize));
}
  /*
   * @param fs the filesystem holding the file
   * @param p File to check.
   * @return True if file was zero-length (and if so, we'll delete it in here).
   * @throws IOException
   */
private static boolean isZeroLengthThenDelete(final FileSystem fs, final Path p)
throws IOException {
FileStatus stat = fs.getFileStatus(p);
if (stat.getLen() > 0) return false;
LOG.warn("File " + p + " is zero-length, deleting.");
fs.delete(p, false);
return true;
}
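  /** Factory hook allowing subclasses to supply a custom {@link HStore} implementation. */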
protected HStore instantiateHStore(final HColumnDescriptor family) throws IOException {
return new HStore(this, family, this.conf);
}
@Override
public Store getStore(final byte[] column) {
return this.stores.get(column);
}
/**
   * Return HStore instance. Does not do any copy: as the number of stores is limited, we
* iterate on the list.
*/
private Store getStore(Cell cell) {
for (Map.Entry<byte[], Store> famStore : stores.entrySet()) {
if (Bytes.equals(
cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),
famStore.getKey(), 0, famStore.getKey().length)) {
return famStore.getValue();
}
}
return null;
}
@Override
public List<Store> getStores() {
List<Store> list = new ArrayList<Store>(stores.size());
list.addAll(stores.values());
return list;
}
@Override
public List<String> getStoreFileList(final byte [][] columns)
throws IllegalArgumentException {
List<String> storeFileNames = new ArrayList<String>();
synchronized(closeLock) {
for(byte[] column : columns) {
Store store = this.stores.get(column);
if (store == null) {
throw new IllegalArgumentException("No column family : " +
new String(column) + " available");
}
for (StoreFile storeFile: store.getStorefiles()) {
storeFileNames.add(storeFile.getPath().toString());
}
logRegionFiles();
}
}
return storeFileNames;
}
//////////////////////////////////////////////////////////////////////////////
// Support code
//////////////////////////////////////////////////////////////////////////////
/** Make sure this is a valid row for the HRegion */
void checkRow(final byte [] row, String op) throws IOException {
if (!rowIsInRange(getRegionInfo(), row)) {
throw new WrongRegionException("Requested row out of range for " +
op + " on HRegion " + this + ", startKey='" +
Bytes.toStringBinary(getRegionInfo().getStartKey()) + "', getEndKey()='" +
Bytes.toStringBinary(getRegionInfo().getEndKey()) + "', row='" +
Bytes.toStringBinary(row) + "'");
}
}
@Override
public RowLock getRowLock(byte[] row, boolean waitForLock) throws IOException {
startRegionOperation();
try {
return getRowLockInternal(row, waitForLock);
} finally {
closeRegionOperation();
}
}
/**
* A version of getRowLock(byte[], boolean) to use when a region operation has already been
* started (the calling thread has already acquired the region-close-guard lock).
*/
protected RowLock getRowLockInternal(byte[] row, boolean waitForLock) throws IOException {
HashedBytes rowKey = new HashedBytes(row);
RowLockContext rowLockContext = new RowLockContext(rowKey);
// loop until we acquire the row lock (unless !waitForLock)
while (true) {
RowLockContext existingContext = lockedRows.putIfAbsent(rowKey, rowLockContext);
if (existingContext == null) {
// Row is not already locked by any thread, use newly created context.
break;
} else if (existingContext.ownedByCurrentThread()) {
// Row is already locked by current thread, reuse existing context instead.
rowLockContext = existingContext;
break;
} else {
if (!waitForLock) {
return null;
}
TraceScope traceScope = null;
try {
if (Trace.isTracing()) {
traceScope = Trace.startSpan("HRegion.getRowLockInternal");
}
// Row is already locked by some other thread, give up or wait for it
if (!existingContext.latch.await(this.rowLockWaitDuration, TimeUnit.MILLISECONDS)) {
if(traceScope != null) {
traceScope.getSpan().addTimelineAnnotation("Failed to get row lock");
}
throw new IOException("Timed out waiting for lock for row: " + rowKey);
}
if (traceScope != null) traceScope.close();
traceScope = null;
} catch (InterruptedException ie) {
LOG.warn("Thread interrupted waiting for lock on row: " + rowKey);
InterruptedIOException iie = new InterruptedIOException();
iie.initCause(ie);
throw iie;
} finally {
if (traceScope != null) traceScope.close();
}
}
}
// allocate new lock for this thread
return rowLockContext.newLock();
}
/**
* Acquires a lock on the given row.
* The same thread may acquire multiple locks on the same row.
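   * <p>A minimal usage sketch (the row key below is illustrative):
   * <pre>
   * RowLock lock = region.getRowLock(Bytes.toBytes("someRow"));
   * try {
   *   // mutate the row while holding the lock
   * } finally {
   *   lock.release();
   * }
   * </pre>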
* @return the acquired row lock
* @throws IOException if the lock could not be acquired after waiting
*/
public RowLock getRowLock(byte[] row) throws IOException {
return getRowLock(row, true);
}
@Override
public void releaseRowLocks(List<RowLock> rowLocks) {
if (rowLocks != null) {
for (RowLock rowLock : rowLocks) {
rowLock.release();
}
rowLocks.clear();
}
}
/**
   * Determines whether multiple column families are present.
   * Precondition: familyPaths is not null.
*
* @param familyPaths List of Pair<byte[] column family, String hfilePath>
*/
private static boolean hasMultipleColumnFamilies(Collection<Pair<byte[], String>> familyPaths) {
boolean multipleFamilies = false;
byte[] family = null;
for (Pair<byte[], String> pair : familyPaths) {
byte[] fam = pair.getFirst();
if (family == null) {
family = fam;
} else if (!Bytes.equals(family, fam)) {
multipleFamilies = true;
break;
}
}
return multipleFamilies;
}
@Override
public boolean bulkLoadHFiles(Collection<Pair<byte[], String>> familyPaths, boolean assignSeqId,
BulkLoadListener bulkLoadListener) throws IOException {
long seqId = -1;
Map<byte[], List<Path>> storeFiles = new TreeMap<byte[], List<Path>>(Bytes.BYTES_COMPARATOR);
Preconditions.checkNotNull(familyPaths);
// we need writeLock for multi-family bulk load
startBulkRegionOperation(hasMultipleColumnFamilies(familyPaths));
try {
this.writeRequestsCount.increment();
// There possibly was a split that happened between when the split keys
// were gathered and before the HRegion's write lock was taken. We need
// to validate the HFile region before attempting to bulk load all of them
List<IOException> ioes = new ArrayList<IOException>();
List<Pair<byte[], String>> failures = new ArrayList<Pair<byte[], String>>();
for (Pair<byte[], String> p : familyPaths) {
byte[] familyName = p.getFirst();
String path = p.getSecond();
Store store = getStore(familyName);
if (store == null) {
IOException ioe = new org.apache.hadoop.hbase.DoNotRetryIOException(
"No such column family " + Bytes.toStringBinary(familyName));
ioes.add(ioe);
} else {
try {
store.assertBulkLoadHFileOk(new Path(path));
} catch (WrongRegionException wre) {
// recoverable (file doesn't fit in region)
failures.add(p);
} catch (IOException ioe) {
// unrecoverable (hdfs problem)
ioes.add(ioe);
}
}
}
// validation failed because of some sort of IO problem.
if (ioes.size() != 0) {
IOException e = MultipleIOException.createIOException(ioes);
LOG.error("There were one or more IO errors when checking if the bulk load is ok.", e);
throw e;
}
// validation failed, bail out before doing anything permanent.
if (failures.size() != 0) {
StringBuilder list = new StringBuilder();
for (Pair<byte[], String> p : failures) {
list.append("\n").append(Bytes.toString(p.getFirst())).append(" : ")
.append(p.getSecond());
}
// problem when validating
LOG.warn("There was a recoverable bulk load failure likely due to a" +
" split. These (family, HFile) pairs were not loaded: " + list);
return false;
}
// We need to assign a sequential ID that's in between two memstores in order to preserve
// the guarantee that all the edits lower than the highest sequential ID from all the
// HFiles are flushed on disk. See HBASE-10958. The sequence id returned when we flush is
// guaranteed to be one beyond the file made when we flushed (or if nothing to flush, it is
// a sequence id that we can be sure is beyond the last hfile written).
if (assignSeqId) {
FlushResult fs = flushcache(true, false);
if (fs.isFlushSucceeded()) {
seqId = ((FlushResultImpl)fs).flushSequenceId;
} else if (fs.getResult() == FlushResult.Result.CANNOT_FLUSH_MEMSTORE_EMPTY) {
seqId = ((FlushResultImpl)fs).flushSequenceId;
} else {
throw new IOException("Could not bulk load with an assigned sequential ID because the "+
"flush didn't run. Reason for not flushing: " + ((FlushResultImpl)fs).failureReason);
}
}
for (Pair<byte[], String> p : familyPaths) {
byte[] familyName = p.getFirst();
String path = p.getSecond();
Store store = getStore(familyName);
try {
String finalPath = path;
if (bulkLoadListener != null) {
finalPath = bulkLoadListener.prepareBulkLoad(familyName, path);
}
          Path committedStoreFile = store.bulkLoadHFile(finalPath, seqId);
          if (storeFiles.containsKey(familyName)) {
            storeFiles.get(familyName).add(committedStoreFile);
          } else {
            List<Path> storeFileNames = new ArrayList<Path>();
            storeFileNames.add(committedStoreFile);
            storeFiles.put(familyName, storeFileNames);
          }
if (bulkLoadListener != null) {
bulkLoadListener.doneBulkLoad(familyName, path);
}
} catch (IOException ioe) {
// A failure here can cause an atomicity violation that we currently
// cannot recover from since it is likely a failed HDFS operation.
// TODO Need a better story for reverting partial failures due to HDFS.
LOG.error("There was a partial failure due to IO when attempting to" +
" load " + Bytes.toString(p.getFirst()) + " : " + p.getSecond(), ioe);
if (bulkLoadListener != null) {
try {
bulkLoadListener.failedBulkLoad(familyName, path);
} catch (Exception ex) {
LOG.error("Error while calling failedBulkLoad for family " +
Bytes.toString(familyName) + " with path " + path, ex);
}
}
throw ioe;
}
}
return true;
} finally {
if (wal != null && !storeFiles.isEmpty()) {
        // write a bulk load event for the hfiles that were loaded, even if not all of them were
try {
WALProtos.BulkLoadDescriptor loadDescriptor = ProtobufUtil.toBulkLoadDescriptor(
this.getRegionInfo().getTable(),
ByteStringer.wrap(this.getRegionInfo().getEncodedNameAsBytes()), storeFiles, seqId);
WALUtil.writeBulkLoadMarkerAndSync(wal, this.htableDescriptor, getRegionInfo(),
loadDescriptor, sequenceId);
} catch (IOException ioe) {
if (this.rsServices != null) {
            // Have to abort the region server because some hfiles have been loaded but we
            // can't write the event into the WAL
this.rsServices.abort("Failed to write bulk load event into WAL.", ioe);
}
}
}
closeBulkRegionOperation();
}
}
@Override
public boolean equals(Object o) {
return o instanceof HRegion && Bytes.equals(getRegionInfo().getRegionName(),
((HRegion) o).getRegionInfo().getRegionName());
}
@Override
public int hashCode() {
return Bytes.hashCode(getRegionInfo().getRegionName());
}
@Override
public String toString() {
return getRegionInfo().getRegionNameAsString();
}
/**
* RegionScannerImpl is used to combine scanners from multiple Stores (aka column families).
*/
class RegionScannerImpl implements RegionScanner {
// Package local for testability
KeyValueHeap storeHeap = null;
/** Heap of key-values that are not essential for the provided filters and are thus read
* on demand, if on-demand column family loading is enabled.*/
KeyValueHeap joinedHeap = null;
/**
* If the joined heap data gathering is interrupted due to scan limits, this will
* contain the row for which we are populating the values.*/
protected Cell joinedContinuationRow = null;
protected final byte[] stopRow;
private final FilterWrapper filter;
private ScannerContext defaultScannerContext;
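    /** 0 for a regular scan (stop row exclusive), -1 for a get (stop row inclusive). */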
protected int isScan;
private boolean filterClosed = false;
private long readPt;
private long maxResultSize;
protected HRegion region;
@Override
public HRegionInfo getRegionInfo() {
return region.getRegionInfo();
}
RegionScannerImpl(Scan scan, List<KeyValueScanner> additionalScanners, HRegion region)
throws IOException {
this.region = region;
this.maxResultSize = scan.getMaxResultSize();
if (scan.hasFilter()) {
this.filter = new FilterWrapper(scan.getFilter());
} else {
this.filter = null;
}
/**
* By default, calls to next/nextRaw must enforce the batch limit. Thus, construct a default
* scanner context that can be used to enforce the batch limit in the event that a
* ScannerContext is not specified during an invocation of next/nextRaw
*/
defaultScannerContext = ScannerContext.newBuilder().setBatchLimit(scan.getBatch()).build();
if (Bytes.equals(scan.getStopRow(), HConstants.EMPTY_END_ROW) && !scan.isGetScan()) {
this.stopRow = null;
} else {
this.stopRow = scan.getStopRow();
}
      // If we are doing a get, we want the range to be [startRow,endRow]; normally
      // it is [startRow,endRow) and if startRow=endRow we get nothing.
this.isScan = scan.isGetScan() ? -1 : 0;
      // synchronize on scannerReadPoints so that nobody calculates
      // getSmallestReadPoint before scannerReadPoints is updated.
IsolationLevel isolationLevel = scan.getIsolationLevel();
synchronized(scannerReadPoints) {
this.readPt = getReadpoint(isolationLevel);
scannerReadPoints.put(this, this.readPt);
}
// Here we separate all scanners into two lists - scanner that provide data required
// by the filter to operate (scanners list) and all others (joinedScanners list).
List<KeyValueScanner> scanners = new ArrayList<KeyValueScanner>();
List<KeyValueScanner> joinedScanners = new ArrayList<KeyValueScanner>();
if (additionalScanners != null) {
scanners.addAll(additionalScanners);
}
for (Map.Entry<byte[], NavigableSet<byte[]>> entry :
scan.getFamilyMap().entrySet()) {
Store store = stores.get(entry.getKey());
KeyValueScanner scanner = store.getScanner(scan, entry.getValue(), this.readPt);
if (this.filter == null || !scan.doLoadColumnFamiliesOnDemand()
|| this.filter.isFamilyEssential(entry.getKey())) {
scanners.add(scanner);
} else {
joinedScanners.add(scanner);
}
}
initializeKVHeap(scanners, joinedScanners, region);
}
protected void initializeKVHeap(List<KeyValueScanner> scanners,
List<KeyValueScanner> joinedScanners, HRegion region)
throws IOException {
this.storeHeap = new KeyValueHeap(scanners, region.comparator);
if (!joinedScanners.isEmpty()) {
this.joinedHeap = new KeyValueHeap(joinedScanners, region.comparator);
}
}
@Override
public long getMaxResultSize() {
return maxResultSize;
}
@Override
public long getMvccReadPoint() {
return this.readPt;
}
@Override
public int getBatch() {
return this.defaultScannerContext.getBatchLimit();
}
/**
* Reset both the filter and the old filter.
*
* @throws IOException in case a filter raises an I/O exception.
*/
protected void resetFilters() throws IOException {
if (filter != null) {
filter.reset();
}
}
@Override
public boolean next(List<Cell> outResults)
throws IOException {
// apply the batching limit by default
return next(outResults, defaultScannerContext);
}
@Override
public synchronized boolean next(List<Cell> outResults, ScannerContext scannerContext) throws IOException {
if (this.filterClosed) {
throw new UnknownScannerException("Scanner was closed (timed out?) " +
"after we renewed it. Could be caused by a very slow scanner " +
"or a lengthy garbage collection");
}
startRegionOperation(Operation.SCAN);
readRequestsCount.increment();
try {
return nextRaw(outResults, scannerContext);
} finally {
closeRegionOperation(Operation.SCAN);
}
}
@Override
public boolean nextRaw(List<Cell> outResults) throws IOException {
// Use the RegionScanner's context by default
return nextRaw(outResults, defaultScannerContext);
}
@Override
public boolean nextRaw(List<Cell> outResults, ScannerContext scannerContext)
throws IOException {
if (storeHeap == null) {
// scanner is closed
throw new UnknownScannerException("Scanner was closed");
}
boolean moreValues;
if (outResults.isEmpty()) {
// Usually outResults is empty. This is true when next is called
        // to handle a scan or get operation.
moreValues = nextInternal(outResults, scannerContext);
} else {
List<Cell> tmpList = new ArrayList<Cell>();
moreValues = nextInternal(tmpList, scannerContext);
outResults.addAll(tmpList);
}
// If the size limit was reached it means a partial Result is being returned. Returning a
// partial Result means that we should not reset the filters; filters should only be reset in
// between rows
if (!scannerContext.partialResultFormed()) resetFilters();
if (isFilterDoneInternal()) {
moreValues = false;
}
return moreValues;
}
/**
* @return true if more cells exist after this batch, false if scanner is done
*/
private boolean populateFromJoinedHeap(List<Cell> results, ScannerContext scannerContext)
throws IOException {
assert joinedContinuationRow != null;
boolean moreValues =
populateResult(results, this.joinedHeap, scannerContext,
joinedContinuationRow.getRowArray(), joinedContinuationRow.getRowOffset(),
joinedContinuationRow.getRowLength());
if (!scannerContext.checkAnyLimitReached(LimitScope.BETWEEN_CELLS)) {
// We are done with this row, reset the continuation.
joinedContinuationRow = null;
}
// As the data is obtained from two independent heaps, we need to
// ensure that result list is sorted, because Result relies on that.
Collections.sort(results, comparator);
return moreValues;
}
    /**
     * Fetches records with currentRow into results list, until next row, batchLimit (if not -1)
     * is reached, or remainingResultSize (if not -1) is reached
     * @param heap KeyValueHeap to fetch data from. It must be positioned on the correct row
     *          before the call.
     * @param scannerContext context holding the batch, size and time limits for this scan
     * @param currentRow Byte array with key we are fetching.
     * @param offset offset for currentRow
     * @param length length for currentRow
     * @return state of last call to {@link KeyValueHeap#next()}
     */
private boolean populateResult(List<Cell> results, KeyValueHeap heap,
ScannerContext scannerContext, byte[] currentRow, int offset, short length)
throws IOException {
Cell nextKv;
boolean moreCellsInRow = false;
boolean tmpKeepProgress = scannerContext.getKeepProgress();
// Scanning between column families and thus the scope is between cells
LimitScope limitScope = LimitScope.BETWEEN_CELLS;
do {
// We want to maintain any progress that is made towards the limits while scanning across
// different column families. To do this, we toggle the keep progress flag on during calls
// to the StoreScanner to ensure that any progress made thus far is not wiped away.
scannerContext.setKeepProgress(true);
heap.next(results, scannerContext);
scannerContext.setKeepProgress(tmpKeepProgress);
nextKv = heap.peek();
moreCellsInRow = moreCellsInRow(nextKv, currentRow, offset, length);
if (scannerContext.checkBatchLimit(limitScope)) {
return scannerContext.setScannerState(NextState.BATCH_LIMIT_REACHED).hasMoreValues();
} else if (scannerContext.checkSizeLimit(limitScope)) {
ScannerContext.NextState state =
moreCellsInRow ? NextState.SIZE_LIMIT_REACHED_MID_ROW : NextState.SIZE_LIMIT_REACHED;
return scannerContext.setScannerState(state).hasMoreValues();
} else if (scannerContext.checkTimeLimit(limitScope)) {
ScannerContext.NextState state =
moreCellsInRow ? NextState.TIME_LIMIT_REACHED_MID_ROW : NextState.TIME_LIMIT_REACHED;
return scannerContext.setScannerState(state).hasMoreValues();
}
} while (moreCellsInRow);
return nextKv != null;
}
    /**
     * Based on the nextKv in the heap, and the current row, decide whether or not there are more
     * cells to be read in the heap. If the row of the nextKv in the heap matches the current row
     * then there are more cells to be read in the row.
     * @param nextKv the next cell in the heap, or null when the heap is exhausted
     * @param currentRow byte array containing the current row key
     * @param offset offset of the row key in currentRow
     * @param length length of the row key in currentRow
     * @return true When there are more cells in the row to be read
     */
private boolean moreCellsInRow(final Cell nextKv, byte[] currentRow, int offset,
short length) {
return nextKv != null && CellUtil.matchingRow(nextKv, currentRow, offset, length);
}
    /*
     * @return True if a filter rules that the scanner is done, over.
     */
@Override
public synchronized boolean isFilterDone() throws IOException {
return isFilterDoneInternal();
}
private boolean isFilterDoneInternal() throws IOException {
return this.filter != null && this.filter.filterAllRemaining();
}
private boolean nextInternal(List<Cell> results, ScannerContext scannerContext)
throws IOException {
if (!results.isEmpty()) {
throw new IllegalArgumentException("First parameter should be an empty list");
}
if (scannerContext == null) {
throw new IllegalArgumentException("Scanner context cannot be null");
}
RpcCallContext rpcCall = RpcServer.getCurrentCall();
// Save the initial progress from the Scanner context in these local variables. The progress
// may need to be reset a few times if rows are being filtered out so we save the initial
// progress.
int initialBatchProgress = scannerContext.getBatchProgress();
long initialSizeProgress = scannerContext.getSizeProgress();
long initialTimeProgress = scannerContext.getTimeProgress();
      // The loop here is used only when, at some point during a call to next(), we determine
      // that due to effects of filters or otherwise, we have an empty row in the result.
      // Then we loop and try again. Otherwise, we must get out on the first iteration via return:
      // "true" if there's more data to read, "false" if there isn't (storeHeap is at a stop row,
      // and joinedHeap has no more data to read for the last row (if set, joinedContinuationRow).
while (true) {
// Starting to scan a new row. Reset the scanner progress according to whether or not
// progress should be kept.
if (scannerContext.getKeepProgress()) {
// Progress should be kept. Reset to initial values seen at start of method invocation.
scannerContext
.setProgress(initialBatchProgress, initialSizeProgress, initialTimeProgress);
} else {
scannerContext.clearProgress();
}
if (rpcCall != null) {
// If a user specifies a too-restrictive or too-slow scanner, the
// client might time out and disconnect while the server side
// is still processing the request. We should abort aggressively
// in that case.
long afterTime = rpcCall.disconnectSince();
if (afterTime >= 0) {
throw new CallerDisconnectedException(
"Aborting on region " + getRegionInfo().getRegionNameAsString() + ", call " +
this + " after " + afterTime + " ms, since " +
"caller disconnected");
}
}
// Let's see what we have in the storeHeap.
Cell current = this.storeHeap.peek();
byte[] currentRow = null;
int offset = 0;
short length = 0;
if (current != null) {
currentRow = current.getRowArray();
offset = current.getRowOffset();
length = current.getRowLength();
}
boolean stopRow = isStopRow(currentRow, offset, length);
        // When hasFilterRow is true it means that all the cells for a particular row must be
        // read before a filtering decision can be made. This means that filters for which
        // hasFilterRow() returns true run the risk of encountering out of memory errors when
        // they are applied to a table that has very large rows.
boolean hasFilterRow = this.filter != null && this.filter.hasFilterRow();
// If filter#hasFilterRow is true, partial results are not allowed since allowing them
// would prevent the filters from being evaluated. Thus, if it is true, change the
// scope of any limits that could potentially create partial results to
// LimitScope.BETWEEN_ROWS so that those limits are not reached mid-row
if (hasFilterRow) {
if (LOG.isTraceEnabled()) {
LOG.trace("filter#hasFilterRow is true which prevents partial results from being "
+ " formed. Changing scope of limits that may create partials");
}
scannerContext.setSizeLimitScope(LimitScope.BETWEEN_ROWS);
scannerContext.setTimeLimitScope(LimitScope.BETWEEN_ROWS);
}
// Check if we were getting data from the joinedHeap and hit the limit.
// If not, then it's main path - getting results from storeHeap.
if (joinedContinuationRow == null) {
// First, check if we are at a stop row. If so, there are no more results.
if (stopRow) {
if (hasFilterRow) {
filter.filterRowCells(results);
}
return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
}
// Check if rowkey filter wants to exclude this row. If so, loop to next.
// Technically, if we hit limits before on this row, we don't need this call.
if (filterRowKey(currentRow, offset, length)) {
boolean moreRows = nextRow(currentRow, offset, length);
if (!moreRows) {
return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
}
results.clear();
continue;
}
// Ok, we are good, let's try to get some results from the main heap.
populateResult(results, this.storeHeap, scannerContext, currentRow, offset, length);
if (scannerContext.checkAnyLimitReached(LimitScope.BETWEEN_CELLS)) {
if (hasFilterRow) {
throw new IncompatibleFilterException(
"Filter whose hasFilterRow() returns true is incompatible with scans that must "
+ " stop mid-row because of a limit. ScannerContext:" + scannerContext);
}
return true;
}
Cell nextKv = this.storeHeap.peek();
stopRow = nextKv == null ||
isStopRow(nextKv.getRowArray(), nextKv.getRowOffset(), nextKv.getRowLength());
          // save that the row was empty before filters were applied to it.
final boolean isEmptyRow = results.isEmpty();
// We have the part of the row necessary for filtering (all of it, usually).
// First filter with the filterRow(List).
FilterWrapper.FilterRowRetCode ret = FilterWrapper.FilterRowRetCode.NOT_CALLED;
if (hasFilterRow) {
ret = filter.filterRowCellsWithRet(results);
// We don't know how the results have changed after being filtered. Must set progress
// according to contents of results now. However, a change in the results should not
// affect the time progress. Thus preserve whatever time progress has been made
long timeProgress = scannerContext.getTimeProgress();
if (scannerContext.getKeepProgress()) {
scannerContext.setProgress(initialBatchProgress, initialSizeProgress,
initialTimeProgress);
} else {
scannerContext.clearProgress();
}
scannerContext.setTimeProgress(timeProgress);
scannerContext.incrementBatchProgress(results.size());
for (Cell cell : results) {
scannerContext.incrementSizeProgress(CellUtil.estimatedHeapSizeOfWithoutTags(cell));
}
}
if ((isEmptyRow || ret == FilterWrapper.FilterRowRetCode.EXCLUDE) || filterRow()) {
results.clear();
boolean moreRows = nextRow(currentRow, offset, length);
if (!moreRows) {
return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
}
          // This row was totally filtered out; if this is NOT the last row,
          // we should continue on. Otherwise, nothing else to do.
if (!stopRow) continue;
return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
}
// Ok, we are done with storeHeap for this row.
// Now we may need to fetch additional, non-essential data into row.
// These values are not needed for filter to work, so we postpone their
// fetch to (possibly) reduce amount of data loads from disk.
if (this.joinedHeap != null) {
boolean mayHaveData = joinedHeapMayHaveData(currentRow, offset, length);
if (mayHaveData) {
joinedContinuationRow = current;
populateFromJoinedHeap(results, scannerContext);
if (scannerContext.checkAnyLimitReached(LimitScope.BETWEEN_CELLS)) {
return true;
}
}
}
} else {
// Populating from the joined heap was stopped by limits, populate some more.
populateFromJoinedHeap(results, scannerContext);
if (scannerContext.checkAnyLimitReached(LimitScope.BETWEEN_CELLS)) {
return true;
}
}
        // We may have just called populateFromJoinedHeap and hit the limits. If that is
// the case, we need to call it again on the next next() invocation.
if (joinedContinuationRow != null) {
return scannerContext.setScannerState(NextState.MORE_VALUES).hasMoreValues();
}
// Finally, we are done with both joinedHeap and storeHeap.
// Double check to prevent empty rows from appearing in result. It could be
// the case when SingleColumnValueExcludeFilter is used.
if (results.isEmpty()) {
boolean moreRows = nextRow(currentRow, offset, length);
if (!moreRows) {
return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
}
if (!stopRow) continue;
}
// We are done. Return the result.
if (stopRow) {
return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
} else {
return scannerContext.setScannerState(NextState.MORE_VALUES).hasMoreValues();
}
}
}
/**
* @param currentRow
* @param offset
* @param length
* @return true when the joined heap may have data for the current row
* @throws IOException
*/
private boolean joinedHeapMayHaveData(byte[] currentRow, int offset, short length)
throws IOException {
Cell nextJoinedKv = joinedHeap.peek();
boolean matchCurrentRow =
nextJoinedKv != null && CellUtil.matchingRow(nextJoinedKv, currentRow, offset, length);
boolean matchAfterSeek = false;
// If the next value in the joined heap does not match the current row, try to seek to the
// correct row
if (!matchCurrentRow) {
Cell firstOnCurrentRow = KeyValueUtil.createFirstOnRow(currentRow, offset, length);
boolean seekSuccessful = this.joinedHeap.requestSeek(firstOnCurrentRow, true, true);
matchAfterSeek =
seekSuccessful && joinedHeap.peek() != null
&& CellUtil.matchingRow(joinedHeap.peek(), currentRow, offset, length);
}
return matchCurrentRow || matchAfterSeek;
}
    /**
     * This function maintains backward compatibility for 0.94 filters. HBASE-6429 combines
     * both filterRow & filterRow(List<KeyValue> kvs) functions. Code written for 0.94 or older
     * may not implement hasFilterRow as HBASE-6429 expects, because in 0.94 hasFilterRow()
     * only returns true when filterRow(List<KeyValue> kvs) is overridden, not filterRow().
     * In that case the filterRow() call will be skipped.
     */
private boolean filterRow() throws IOException {
// when hasFilterRow returns true, filter.filterRow() will be called automatically inside
// filterRowCells(List<Cell> kvs) so we skip that scenario here.
return filter != null && (!filter.hasFilterRow())
&& filter.filterRow();
}
private boolean filterRowKey(byte[] row, int offset, short length) throws IOException {
return filter != null
&& filter.filterRowKey(row, offset, length);
}
protected boolean nextRow(byte [] currentRow, int offset, short length) throws IOException {
assert this.joinedContinuationRow == null: "Trying to go to next row during joinedHeap read.";
Cell next;
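      // Drain the remaining cells of the current row; MOCKED_LIST silently discards
      // anything added to it, so the skipped cells are never retained.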
while ((next = this.storeHeap.peek()) != null &&
CellUtil.matchingRow(next, currentRow, offset, length)) {
this.storeHeap.next(MOCKED_LIST);
}
resetFilters();
// Calling the hook in CP which allows it to do a fast forward
return this.region.getCoprocessorHost() == null
|| this.region.getCoprocessorHost()
.postScannerFilterRow(this, currentRow, offset, length);
}
protected boolean isStopRow(byte[] currentRow, int offset, short length) {
return currentRow == null ||
(stopRow != null &&
comparator.compareRows(stopRow, 0, stopRow.length,
currentRow, offset, length) <= isScan);
}
@Override
public synchronized void close() {
if (storeHeap != null) {
storeHeap.close();
storeHeap = null;
}
if (joinedHeap != null) {
joinedHeap.close();
joinedHeap = null;
}
// no need to synchronize here.
scannerReadPoints.remove(this);
this.filterClosed = true;
}
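    /** Exposes the store heap so tests can inspect the scanner's internal state. */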
KeyValueHeap getStoreHeapForTesting() {
return storeHeap;
}
@Override
public synchronized boolean reseek(byte[] row) throws IOException {
if (row == null) {
throw new IllegalArgumentException("Row cannot be null.");
}
boolean result = false;
startRegionOperation();
try {
KeyValue kv = KeyValueUtil.createFirstOnRow(row);
// use request seek to make use of the lazy seek option. See HBASE-5520
result = this.storeHeap.requestSeek(kv, true, true);
if (this.joinedHeap != null) {
result = this.joinedHeap.requestSeek(kv, true, true) || result;
}
} finally {
closeRegionOperation();
}
return result;
}
}
// Utility methods
/**
* A utility method to create new instances of HRegion based on the
* {@link HConstants#REGION_IMPL} configuration property.
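   * <p>A hedged sketch of plugging in a custom region implementation through the
   * configuration (the class name below is hypothetical):
   * <pre>
   * conf.setClass(HConstants.REGION_IMPL, MyRegion.class, HRegion.class);
   * </pre>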
* @param tableDir qualified path of directory where region should be located,
* usually the table directory.
* @param wal The WAL is the outbound log for any updates to the HRegion
* The wal file is a logfile from the previous execution that's
* custom-computed for this HRegion. The HRegionServer computes and sorts the
* appropriate wal info for this HRegion. If there is a previous file
* (implying that the HRegion has been written-to before), then read it from
* the supplied path.
* @param fs is the filesystem.
* @param conf is global configuration settings.
   * @param regionInfo - HRegionInfo that describes the region
* @param htd the table descriptor
* @return the new instance
*/
static HRegion newHRegion(Path tableDir, WAL wal, FileSystem fs,
Configuration conf, HRegionInfo regionInfo, final HTableDescriptor htd,
RegionServerServices rsServices) {
try {
@SuppressWarnings("unchecked")
Class<? extends HRegion> regionClass =
(Class<? extends HRegion>) conf.getClass(HConstants.REGION_IMPL, HRegion.class);
Constructor<? extends HRegion> c =
regionClass.getConstructor(Path.class, WAL.class, FileSystem.class,
Configuration.class, HRegionInfo.class, HTableDescriptor.class,
RegionServerServices.class);
return c.newInstance(tableDir, wal, fs, conf, regionInfo, htd, rsServices);
} catch (Throwable e) {
// todo: what should I throw here?
throw new IllegalStateException("Could not instantiate a region instance.", e);
}
}
/**
* Convenience method creating new HRegions. Used by createTable and by the
* bootstrap code in the HMaster constructor.
* Note, this method creates an {@link WAL} for the created region. It
* needs to be closed explicitly. Use {@link HRegion#getWAL()} to get
* access. <b>When done with a region created using this method, you will
* need to explicitly close the {@link WAL} it created too; it will not be
* done for you. Not closing the wal will leave at least a daemon thread
* running.</b> Call {@link #closeHRegion(HRegion)} and it will do
* necessary cleanup for you.
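   * <p>A minimal usage sketch, assuming {@code info}, {@code rootDir}, {@code conf} and
   * {@code htd} are already set up:
   * <pre>
   * HRegion region = HRegion.createHRegion(info, rootDir, conf, htd);
   * try {
   *   // use the region
   * } finally {
   *   HRegion.closeHRegion(region); // also closes the WAL created for the region
   * }
   * </pre>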
* @param info Info for region to create.
* @param rootDir Root directory for HBase instance
* @return new HRegion
*
* @throws IOException
*/
public static HRegion createHRegion(final HRegionInfo info, final Path rootDir,
final Configuration conf, final HTableDescriptor hTableDescriptor)
throws IOException {
return createHRegion(info, rootDir, conf, hTableDescriptor, null);
}
/**
* This will do the necessary cleanup a call to
* {@link #createHRegion(HRegionInfo, Path, Configuration, HTableDescriptor)}
* requires. This method will close the region and then close its
   * associated {@link WAL} file. You can still use it if you call the other createHRegion,
   * the one that takes a {@link WAL} instance, but don't be surprised by the
   * call to {@link WAL#close()} on the {@link WAL} the
   * HRegion was carrying.
* @throws IOException
*/
public static void closeHRegion(final HRegion r) throws IOException {
if (r == null) return;
r.close();
if (r.getWAL() == null) return;
r.getWAL().close();
}
/**
* Convenience method creating new HRegions. Used by createTable.
* The {@link WAL} for the created region needs to be closed explicitly.
* Use {@link HRegion#getWAL()} to get access.
*
* @param info Info for region to create.
* @param rootDir Root directory for HBase instance
* @param wal shared WAL
* @param initialize - true to initialize the region
* @return new HRegion
*
* @throws IOException
*/
public static HRegion createHRegion(final HRegionInfo info, final Path rootDir,
final Configuration conf,
final HTableDescriptor hTableDescriptor,
final WAL wal,
final boolean initialize)
throws IOException {
return createHRegion(info, rootDir, conf, hTableDescriptor,
wal, initialize, false);
}
/**
* Convenience method creating new HRegions. Used by createTable.
* The {@link WAL} for the created region needs to be closed
* explicitly, if it is not null.
* Use {@link HRegion#getWAL()} to get access.
*
* @param info Info for region to create.
* @param rootDir Root directory for HBase instance
* @param wal shared WAL
* @param initialize - true to initialize the region
   * @param ignoreWAL - true to skip generating a new WAL if it is null, mostly for createTable
* @return new HRegion
* @throws IOException
*/
public static HRegion createHRegion(final HRegionInfo info, final Path rootDir,
final Configuration conf,
final HTableDescriptor hTableDescriptor,
final WAL wal,
final boolean initialize, final boolean ignoreWAL)
throws IOException {
Path tableDir = FSUtils.getTableDir(rootDir, info.getTable());
return createHRegion(info, rootDir, tableDir, conf, hTableDescriptor, wal, initialize,
ignoreWAL);
}
/**
* Convenience method creating new HRegions. Used by createTable.
* The {@link WAL} for the created region needs to be closed
* explicitly, if it is not null.
* Use {@link HRegion#getWAL()} to get access.
*
* @param info Info for region to create.
* @param rootDir Root directory for HBase instance
* @param tableDir table directory
* @param wal shared WAL
* @param initialize - true to initialize the region
   * @param ignoreWAL - true to skip generating a new WAL if it is null, mostly for createTable
* @return new HRegion
* @throws IOException
*/
public static HRegion createHRegion(final HRegionInfo info, final Path rootDir, final Path tableDir,
final Configuration conf,
final HTableDescriptor hTableDescriptor,
final WAL wal,
final boolean initialize, final boolean ignoreWAL)
throws IOException {
LOG.info("creating HRegion " + info.getTable().getNameAsString()
+ " HTD == " + hTableDescriptor + " RootDir = " + rootDir +
" Table name == " + info.getTable().getNameAsString());
FileSystem fs = FileSystem.get(conf);
HRegionFileSystem.createRegionOnFileSystem(conf, fs, tableDir, info);
WAL effectiveWAL = wal;
if (wal == null && !ignoreWAL) {
// TODO HBASE-11983 There'll be no roller for this wal?
// The WAL subsystem will use the default rootDir rather than the passed in rootDir
// unless I pass along via the conf.
Configuration confForWAL = new Configuration(conf);
confForWAL.set(HConstants.HBASE_DIR, rootDir.toString());
effectiveWAL = (new WALFactory(confForWAL,
Collections.<WALActionsListener>singletonList(new MetricsWAL()),
"hregion-" + RandomStringUtils.randomNumeric(8))).
getWAL(info.getEncodedNameAsBytes());
}
HRegion region = HRegion.newHRegion(tableDir,
effectiveWAL, fs, conf, info, hTableDescriptor, null);
if (initialize) {
// If initializing, set the sequenceId. It is also required by WALPerformanceEvaluation when
// verifying the WALEdits.
region.setSequenceId(region.initialize(null));
}
return region;
}
public static HRegion createHRegion(final HRegionInfo info, final Path rootDir,
final Configuration conf,
final HTableDescriptor hTableDescriptor,
final WAL wal)
throws IOException {
return createHRegion(info, rootDir, conf, hTableDescriptor, wal, true);
}
/**
* Open a Region.
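   * <p>A minimal usage sketch, assuming {@code info}, {@code htd}, {@code wal} and
   * {@code conf} are already available:
   * <pre>
   * HRegion region = HRegion.openHRegion(info, htd, wal, conf);
   * </pre>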
* @param info Info for region to be opened.
* @param wal WAL for region to use. This method will call
* WAL#setSequenceNumber(long) passing the result of the call to
* HRegion#getMinSequenceId() to ensure the wal id is properly kept
* up. HRegionStore does this every time it opens a new region.
* @return new HRegion
*
* @throws IOException
*/
public static HRegion openHRegion(final HRegionInfo info,
final HTableDescriptor htd, final WAL wal,
final Configuration conf)
throws IOException {
return openHRegion(info, htd, wal, conf, null, null);
}
/**
* Open a Region.
* @param info Info for region to be opened
* @param htd the table descriptor
* @param wal WAL for region to use. This method will call
* WAL#setSequenceNumber(long) passing the result of the call to
* HRegion#getMinSequenceId() to ensure the wal id is properly kept
* up. HRegionStore does this every time it opens a new region.
* @param conf The Configuration object to use.
* @param rsServices An interface we can request flushes against.
* @param reporter An interface we can report progress against.
* @return new HRegion
*
* @throws IOException
*/
public static HRegion openHRegion(final HRegionInfo info,
final HTableDescriptor htd, final WAL wal, final Configuration conf,
final RegionServerServices rsServices,
final CancelableProgressable reporter)
throws IOException {
return openHRegion(FSUtils.getRootDir(conf), info, htd, wal, conf, rsServices, reporter);
}
/**
* Open a Region.
* @param rootDir Root directory for HBase instance
* @param info Info for region to be opened.
* @param htd the table descriptor
* @param wal WAL for region to use. This method will call
* WAL#setSequenceNumber(long) passing the result of the call to
* HRegion#getMinSequenceId() to ensure the wal id is properly kept
* up. HRegionStore does this every time it opens a new region.
* @param conf The Configuration object to use.
* @return new HRegion
* @throws IOException
*/
public static HRegion openHRegion(Path rootDir, final HRegionInfo info,
final HTableDescriptor htd, final WAL wal, final Configuration conf)
throws IOException {
return openHRegion(rootDir, info, htd, wal, conf, null, null);
}
/**
* Open a Region.
* @param rootDir Root directory for HBase instance
* @param info Info for region to be opened.
* @param htd the table descriptor
* @param wal WAL for region to use. This method will call
* WAL#setSequenceNumber(long) passing the result of the call to
* HRegion#getMinSequenceId() to ensure the wal id is properly kept
* up. HRegionStore does this every time it opens a new region.
* @param conf The Configuration object to use.
* @param rsServices An interface we can request flushes against.
* @param reporter An interface we can report progress against.
* @return new HRegion
* @throws IOException
*/
public static HRegion openHRegion(final Path rootDir, final HRegionInfo info,
final HTableDescriptor htd, final WAL wal, final Configuration conf,
final RegionServerServices rsServices,
final CancelableProgressable reporter)
throws IOException {
FileSystem fs = null;
if (rsServices != null) {
fs = rsServices.getFileSystem();
}
if (fs == null) {
fs = FileSystem.get(conf);
}
return openHRegion(conf, fs, rootDir, info, htd, wal, rsServices, reporter);
}
/**
* Open a Region.
* @param conf The Configuration object to use.
* @param fs Filesystem to use
* @param rootDir Root directory for HBase instance
* @param info Info for region to be opened.
* @param htd the table descriptor
* @param wal WAL for region to use. This method will call
* WAL#setSequenceNumber(long) passing the result of the call to
* HRegion#getMinSequenceId() to ensure the wal id is properly kept
* up. HRegionServer does this every time it opens a new region.
* @return new HRegion
* @throws IOException
*/
public static HRegion openHRegion(final Configuration conf, final FileSystem fs,
final Path rootDir, final HRegionInfo info, final HTableDescriptor htd, final WAL wal)
throws IOException {
return openHRegion(conf, fs, rootDir, info, htd, wal, null, null);
}
/**
* Open a Region.
* @param conf The Configuration object to use.
* @param fs Filesystem to use
* @param rootDir Root directory for HBase instance
* @param info Info for region to be opened.
* @param htd the table descriptor
* @param wal WAL for region to use. This method will call
* WAL#setSequenceNumber(long) passing the result of the call to
* HRegion#getMinSequenceId() to ensure the wal id is properly kept
* up. HRegionServer does this every time it opens a new region.
* @param rsServices An interface we can request flushes against.
* @param reporter An interface we can report progress against.
* @return new HRegion
* @throws IOException
*/
public static HRegion openHRegion(final Configuration conf, final FileSystem fs,
final Path rootDir, final HRegionInfo info, final HTableDescriptor htd, final WAL wal,
final RegionServerServices rsServices, final CancelableProgressable reporter)
throws IOException {
Path tableDir = FSUtils.getTableDir(rootDir, info.getTable());
return openHRegion(conf, fs, rootDir, tableDir, info, htd, wal, rsServices, reporter);
}
/**
* Open a Region.
* @param conf The Configuration object to use.
* @param fs Filesystem to use
* @param rootDir Root directory for HBase instance
* @param info Info for region to be opened.
* @param htd the table descriptor
* @param wal WAL for region to use. This method will call
* WAL#setSequenceNumber(long) passing the result of the call to
* HRegion#getMinSequenceId() to ensure the wal id is properly kept
* up. HRegionServer does this every time it opens a new region.
* @param rsServices An interface we can request flushes against.
* @param reporter An interface we can report progress against.
* @return new HRegion
* @throws IOException
*/
public static HRegion openHRegion(final Configuration conf, final FileSystem fs,
final Path rootDir, final Path tableDir, final HRegionInfo info, final HTableDescriptor htd,
final WAL wal, final RegionServerServices rsServices,
final CancelableProgressable reporter)
throws IOException {
if (info == null) throw new NullPointerException("Passed region info is null");
if (LOG.isDebugEnabled()) {
LOG.debug("Opening region: " + info);
}
HRegion r = HRegion.newHRegion(tableDir, wal, fs, conf, info, htd, rsServices);
return r.openHRegion(reporter);
}
/**
* Useful when reopening a closed region (normally for unit tests)
* @param other original object
* @param reporter An interface we can report progress against.
* @return new HRegion
* @throws IOException
*/
public static HRegion openHRegion(final HRegion other, final CancelableProgressable reporter)
throws IOException {
HRegionFileSystem regionFs = other.getRegionFileSystem();
HRegion r = newHRegion(regionFs.getTableDir(), other.getWAL(), regionFs.getFileSystem(),
other.baseConf, other.getRegionInfo(), other.getTableDesc(), null);
return r.openHRegion(reporter);
}
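/**
 * {@link Region}-typed convenience wrapper around
 * {@link #openHRegion(HRegion, CancelableProgressable)}.
 */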
public static Region openHRegion(final Region other, final CancelableProgressable reporter)
throws IOException {
return openHRegion((HRegion)other, reporter);
}
/**
* Open HRegion.
* Calls initialize and sets sequenceId.
* @return Returns <code>this</code>
* @throws IOException
*/
protected HRegion openHRegion(final CancelableProgressable reporter)
throws IOException {
// Refuse to open the region if we are missing local compression support
checkCompressionCodecs();
// Refuse to open the region if encryption configuration is incorrect or
// codec support is missing
checkEncryption();
// Refuse to open the region if a required class cannot be loaded
checkClassLoading();
this.openSeqNum = initialize(reporter);
this.setSequenceId(openSeqNum);
if (wal != null && getRegionServerServices() != null && !writestate.readOnly
&& !isRecovering) {
// Only write the region open event marker to WAL if (1) we are not read-only
// (2) dist log replay is off or we are not recovering. In case region is
// recovering, the open event will be written at setRecovering(false)
writeRegionOpenMarker(wal, openSeqNum);
}
return this;
}
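/**
 * Builds a region instance from the given descriptors, runs its warmup initialization and
 * then closes it again, without opening the region for serving.
 * @param info Info for region to be warmed up
 * @param htd the table descriptor
 * @param wal WAL for region to use
 * @param conf The Configuration object to use.
 * @param rsServices An interface we can request flushes against.
 * @param reporter An interface we can report progress against.
 * @throws IOException
 */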
public static void warmupHRegion(final HRegionInfo info,
final HTableDescriptor htd, final WAL wal, final Configuration conf,
final RegionServerServices rsServices,
final CancelableProgressable reporter)
throws IOException {
if (info == null) throw new NullPointerException("Passed region info is null");
if (LOG.isDebugEnabled()) {
LOG.debug("HRegion.Warming up region: " + info);
}
Path rootDir = FSUtils.getRootDir(conf);
Path tableDir = FSUtils.getTableDir(rootDir, info.getTable());
FileSystem fs = null;
if (rsServices != null) {
fs = rsServices.getFileSystem();
}
if (fs == null) {
fs = FileSystem.get(conf);
}
HRegion r = HRegion.newHRegion(tableDir, wal, fs, conf, info, htd, rsServices);
r.initializeWarmup(reporter);
r.close();
}
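/**
 * Verifies that the compression codecs configured for each column family, for both regular
 * and compaction compression, are available on this server.
 */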
private void checkCompressionCodecs() throws IOException {
for (HColumnDescriptor fam: this.htableDescriptor.getColumnFamilies()) {
CompressionTest.testCompression(fam.getCompression());
CompressionTest.testCompression(fam.getCompactionCompression());
}
}
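/**
 * Verifies the encryption configuration (cipher type and key) of each column family against
 * this server's configuration.
 */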
private void checkEncryption() throws IOException {
for (HColumnDescriptor fam: this.htableDescriptor.getColumnFamilies()) {
EncryptionTest.testEncryption(conf, fam.getEncryptionType(), fam.getEncryptionKey());
}
}
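/**
 * Verifies that the split policy class and any table coprocessor classes declared for this
 * table can actually be loaded.
 */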
private void checkClassLoading() throws IOException {
RegionSplitPolicy.getSplitPolicyClass(this.htableDescriptor, conf);
RegionCoprocessorHost.testTableCoprocessorAttrs(conf, this.htableDescriptor);
}
/**
* Create a daughter region from given a temp directory with the region data.
* @param hri Spec. for daughter region to open.
* @throws IOException
*/
HRegion createDaughterRegionFromSplits(final HRegionInfo hri) throws IOException {
// Move the files from the temporary .splits to the final /table/region directory
fs.commitDaughterRegion(hri);
// Create the daughter HRegion instance
HRegion r = HRegion.newHRegion(this.fs.getTableDir(), this.getWAL(), fs.getFileSystem(),
this.getBaseConf(), hri, this.getTableDesc(), rsServices);
r.readRequestsCount.set(this.getReadRequestsCount() / 2);
r.writeRequestsCount.set(this.getWriteRequestsCount() / 2);
return r;
}
/**
* Create a merged region given a temp directory with the region data.
* @param region_b another merging region
* @return merged HRegion
* @throws IOException
*/
HRegion createMergedRegionFromMerges(final HRegionInfo mergedRegionInfo,
final HRegion region_b) throws IOException {
HRegion r = HRegion.newHRegion(this.fs.getTableDir(), this.getWAL(),
fs.getFileSystem(), this.getBaseConf(), mergedRegionInfo,
this.getTableDesc(), this.rsServices);
r.readRequestsCount.set(this.getReadRequestsCount()
+ region_b.getReadRequestsCount());
r.writeRequestsCount.set(this.getWriteRequestsCount()
+ region_b.getWriteRequestsCount());
this.fs.commitMergedRegion(mergedRegionInfo);
return r;
}
/**
* Inserts a new region's meta information into the passed
* <code>meta</code> region. Used by the HMaster bootstrap code adding
* new table to hbase:meta table.
*
* @param meta hbase:meta HRegion to be updated
* @param r HRegion to add to <code>meta</code>
*
* @throws IOException
*/
// TODO remove since only test and merge use this
public static void addRegionToMETA(final HRegion meta, final HRegion r) throws IOException {
meta.checkResources();
// The row key is the region name
byte[] row = r.getRegionInfo().getRegionName();
final long now = EnvironmentEdgeManager.currentTime();
final List<Cell> cells = new ArrayList<Cell>(2);
cells.add(new KeyValue(row, HConstants.CATALOG_FAMILY,
HConstants.REGIONINFO_QUALIFIER, now,
r.getRegionInfo().toByteArray()));
// Set into the root table the version of the meta table.
cells.add(new KeyValue(row, HConstants.CATALOG_FAMILY,
HConstants.META_VERSION_QUALIFIER, now,
Bytes.toBytes(HConstants.META_VERSION)));
meta.put(row, HConstants.CATALOG_FAMILY, cells);
}
/**
* Computes the Path of the HRegion
*
* @param tabledir qualified path for table
* @param name ENCODED region name
* @return Path of HRegion directory
*/
@Deprecated
public static Path getRegionDir(final Path tabledir, final String name) {
return new Path(tabledir, name);
}
/**
* Computes the Path of the HRegion
*
* @param rootdir qualified path of HBase root directory
* @param info HRegionInfo for the region
* @return qualified path of region directory
*/
@Deprecated
@VisibleForTesting
public static Path getRegionDir(final Path rootdir, final HRegionInfo info) {
return new Path(
FSUtils.getTableDir(rootdir, info.getTable()), info.getEncodedName());
}
/**
* Determines if the specified row is within the row range specified by the
* specified HRegionInfo
*
* @param info HRegionInfo that specifies the row range
* @param row row to be checked
* @return true if the row is within the range specified by the HRegionInfo
*/
public static boolean rowIsInRange(HRegionInfo info, final byte [] row) {
return ((info.getStartKey().length == 0) ||
(Bytes.compareTo(info.getStartKey(), row) <= 0)) &&
((info.getEndKey().length == 0) ||
(Bytes.compareTo(info.getEndKey(), row) > 0));
}
/**
* Merge two HRegions. The regions must be adjacent and must not overlap.
*
* @return new merged HRegion
* @throws IOException
*/
public static HRegion mergeAdjacent(final HRegion srcA, final HRegion srcB)
throws IOException {
HRegion a = srcA;
HRegion b = srcB;
// Make sure that srcA comes first; important for key-ordering during
// write of the merged file.
if (srcA.getRegionInfo().getStartKey() == null) {
if (srcB.getRegionInfo().getStartKey() == null) {
throw new IOException("Cannot merge two regions with null start key");
}
// A's start key is null but B's isn't. Assume A comes before B
} else if ((srcB.getRegionInfo().getStartKey() == null) ||
(Bytes.compareTo(srcA.getRegionInfo().getStartKey(),
srcB.getRegionInfo().getStartKey()) > 0)) {
a = srcB;
b = srcA;
}
if (!(Bytes.compareTo(a.getRegionInfo().getEndKey(),
b.getRegionInfo().getStartKey()) == 0)) {
throw new IOException("Cannot merge non-adjacent regions");
}
return merge(a, b);
}
/**
* Merge two regions whether they are adjacent or not.
*
* @param a region a
* @param b region b
* @return new merged region
* @throws IOException
*/
public static HRegion merge(final HRegion a, final HRegion b) throws IOException {
if (!a.getRegionInfo().getTable().equals(b.getRegionInfo().getTable())) {
throw new IOException("Regions do not belong to the same table");
}
FileSystem fs = a.getRegionFileSystem().getFileSystem();
// Make sure each region's cache is empty
a.flush(true);
b.flush(true);
// Compact each region so we only have one store file per family
a.compact(true);
if (LOG.isDebugEnabled()) {
LOG.debug("Files for region: " + a);
a.getRegionFileSystem().logFileSystemState(LOG);
}
b.compact(true);
if (LOG.isDebugEnabled()) {
LOG.debug("Files for region: " + b);
b.getRegionFileSystem().logFileSystemState(LOG);
}
RegionMergeTransactionImpl rmt = new RegionMergeTransactionImpl(a, b, true);
if (!rmt.prepare(null)) {
throw new IOException("Unable to merge regions " + a + " and " + b);
}
HRegionInfo mergedRegionInfo = rmt.getMergedRegionInfo();
LOG.info("starting merge of regions: " + a + " and " + b
+ " into new region " + mergedRegionInfo.getRegionNameAsString()
+ " with start key <"
+ Bytes.toStringBinary(mergedRegionInfo.getStartKey())
+ "> and end key <"
+ Bytes.toStringBinary(mergedRegionInfo.getEndKey()) + ">");
HRegion dstRegion;
try {
dstRegion = (HRegion)rmt.execute(null, null);
} catch (IOException ioe) {
rmt.rollback(null, null);
throw new IOException("Failed merging region " + a + " and " + b
+ ", and successfully rolled back");
}
dstRegion.compact(true);
if (LOG.isDebugEnabled()) {
LOG.debug("Files for new region");
dstRegion.getRegionFileSystem().logFileSystemState(LOG);
}
if (dstRegion.getRegionFileSystem().hasReferences(dstRegion.getTableDesc())) {
throw new IOException("Merged region " + dstRegion
+ " still has references after the compaction, is compaction canceled?");
}
// Archiving the 'A' region
HFileArchiver.archiveRegion(a.getBaseConf(), fs, a.getRegionInfo());
// Archiving the 'B' region
HFileArchiver.archiveRegion(b.getBaseConf(), fs, b.getRegionInfo());
LOG.info("merge completed. New region is " + dstRegion);
return dstRegion;
}
@Override
public Result get(final Get get) throws IOException {
checkRow(get.getRow(), "Get");
// Verify families are all valid
if (get.hasFamilies()) {
for (byte [] family: get.familySet()) {
checkFamily(family);
}
} else { // Adding all families to scanner
for (byte[] family: this.htableDescriptor.getFamiliesKeys()) {
get.addFamily(family);
}
}
List<Cell> results = get(get, true);
boolean stale = this.getRegionInfo().getReplicaId() != 0;
return Result.create(results, get.isCheckExistenceOnly() ? !results.isEmpty() : null, stale);
}
@Override
public List<Cell> get(Get get, boolean withCoprocessor) throws IOException {
List<Cell> results = new ArrayList<Cell>();
// pre-get CP hook
if (withCoprocessor && (coprocessorHost != null)) {
if (coprocessorHost.preGet(get, results)) {
return results;
}
}
Scan scan = new Scan(get);
RegionScanner scanner = null;
try {
scanner = getScanner(scan);
scanner.next(results);
} finally {
if (scanner != null)
scanner.close();
}
// post-get CP hook
if (withCoprocessor && (coprocessorHost != null)) {
coprocessorHost.postGet(get, results);
}
// do after lock
if (this.metricsRegion != null) {
long totalSize = 0L;
for (Cell cell : results) {
totalSize += CellUtil.estimatedSerializedSizeOf(cell);
}
this.metricsRegion.updateGet(totalSize);
}
return results;
}
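/**
 * Applies all mutations of the given {@link RowMutations} atomically to a single row.
 * @throws IOException
 */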
public void mutateRow(RowMutations rm) throws IOException {
// Don't need nonces here - RowMutations only supports puts and deletes
mutateRowsWithLocks(rm.getMutations(), Collections.singleton(rm.getRow()));
}
/**
* Perform atomic mutations within the region w/o nonces.
* See {@link #mutateRowsWithLocks(Collection, Collection, long, long)}
*/
public void mutateRowsWithLocks(Collection<Mutation> mutations,
Collection<byte[]> rowsToLock) throws IOException {
mutateRowsWithLocks(mutations, rowsToLock, HConstants.NO_NONCE, HConstants.NO_NONCE);
}
/**
* Perform atomic mutations within the region.
* @param mutations The list of mutations to perform.
* <code>mutations</code> can contain operations for multiple rows.
* Caller has to ensure that all rows are contained in this region.
* @param rowsToLock Rows to lock. If multiple rows are locked, care should be taken that
* <code>rowsToLock</code> is sorted in order to avoid deadlocks.
* @param nonceGroup Optional nonce group of the operation (client Id)
* @param nonce Optional nonce of the operation (unique random id to ensure "more idempotence")
* @throws IOException
*/
public void mutateRowsWithLocks(Collection<Mutation> mutations,
Collection<byte[]> rowsToLock, long nonceGroup, long nonce) throws IOException {
MultiRowMutationProcessor proc = new MultiRowMutationProcessor(mutations, rowsToLock);
processRowsWithLocks(proc, -1, nonceGroup, nonce);
}
/**
* @return the current load statistics for the region
*/
public ClientProtos.RegionLoadStats getRegionStats() {
if (!regionStatsEnabled) {
return null;
}
ClientProtos.RegionLoadStats.Builder stats = ClientProtos.RegionLoadStats.newBuilder();
stats.setMemstoreLoad((int) (Math.min(100, (this.memstoreSize.get() * 100) / this
.memstoreFlushSize)));
stats.setHeapOccupancy((int) (rsServices.getHeapMemoryManager().getHeapOccupancyPercent() * 100));
return stats.build();
}
@Override
public void processRowsWithLocks(RowProcessor<?,?> processor) throws IOException {
processRowsWithLocks(processor, rowProcessorTimeout, HConstants.NO_NONCE,
HConstants.NO_NONCE);
}
@Override
public void processRowsWithLocks(RowProcessor<?,?> processor, long nonceGroup, long nonce)
throws IOException {
processRowsWithLocks(processor, rowProcessorTimeout, nonceGroup, nonce);
}
@Override
public void processRowsWithLocks(RowProcessor<?,?> processor, long timeout,
long nonceGroup, long nonce) throws IOException {
for (byte[] row : processor.getRowsToLock()) {
checkRow(row, "processRowsWithLocks");
}
if (!processor.readOnly()) {
checkReadOnly();
}
checkResources();
startRegionOperation();
WALEdit walEdit = new WALEdit();
// 1. Run pre-process hook
try {
processor.preProcess(this, walEdit);
} catch (IOException e) {
closeRegionOperation();
throw e;
}
// Short circuit the read only case
if (processor.readOnly()) {
try {
long now = EnvironmentEdgeManager.currentTime();
doProcessRowWithTimeout(
processor, now, this, null, null, timeout);
processor.postProcess(this, walEdit, true);
} finally {
closeRegionOperation();
}
return;
}
MultiVersionConsistencyControl.WriteEntry writeEntry = null;
boolean locked;
boolean walSyncSuccessful = false;
List<RowLock> acquiredRowLocks;
long addedSize = 0;
List<Mutation> mutations = new ArrayList<Mutation>();
List<Cell> memstoreCells = new ArrayList<Cell>();
Collection<byte[]> rowsToLock = processor.getRowsToLock();
long mvccNum = 0;
WALKey walKey = null;
try {
// 2. Acquire the row lock(s)
acquiredRowLocks = new ArrayList<RowLock>(rowsToLock.size());
for (byte[] row : rowsToLock) {
// Attempt to lock all involved rows, throw if any lock times out
acquiredRowLocks.add(getRowLock(row));
}
// 3. Region lock
lock(this.updatesLock.readLock(), acquiredRowLocks.size() == 0 ? 1 : acquiredRowLocks.size());
locked = true;
// Get a mvcc write number
mvccNum = MultiVersionConsistencyControl.getPreAssignedWriteNumber(this.sequenceId);
long now = EnvironmentEdgeManager.currentTime();
try {
// 4. Let the processor scan the rows, generate mutations and add
// waledits
doProcessRowWithTimeout(
processor, now, this, mutations, walEdit, timeout);
if (!mutations.isEmpty()) {
// 5. Start mvcc transaction
writeEntry = mvcc.beginMemstoreInsertWithSeqNum(mvccNum);
// 6. Call the preBatchMutate hook
processor.preBatchMutate(this, walEdit);
// 7. Apply to memstore
for (Mutation m : mutations) {
// Handle any tag based cell features
rewriteCellTags(m.getFamilyCellMap(), m);
for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance();) {
Cell cell = cellScanner.current();
CellUtil.setSequenceId(cell, mvccNum);
Store store = getStore(cell);
if (store == null) {
checkFamily(CellUtil.cloneFamily(cell));
// unreachable
}
Pair<Long, Cell> ret = store.add(cell);
addedSize += ret.getFirst();
memstoreCells.add(ret.getSecond());
}
}
long txid = 0;
// 8. Append no sync
if (!walEdit.isEmpty()) {
// we use HLogKey here instead of WALKey directly to support legacy coprocessors.
walKey = new HLogKey(this.getRegionInfo().getEncodedNameAsBytes(),
this.htableDescriptor.getTableName(), WALKey.NO_SEQUENCE_ID, now,
processor.getClusterIds(), nonceGroup, nonce);
txid = this.wal.append(this.htableDescriptor, this.getRegionInfo(),
walKey, walEdit, getSequenceId(), true, memstoreCells);
}
if(walKey == null){
// since we use wal sequence Id as mvcc, for SKIP_WAL changes we need a "faked" WALEdit
// to get a sequence id assigned which is done by FSWALEntry#stampRegionSequenceId
walKey = this.appendEmptyEdit(this.wal, memstoreCells);
}
// 9. Release region lock
if (locked) {
this.updatesLock.readLock().unlock();
locked = false;
}
// 10. Release row lock(s)
releaseRowLocks(acquiredRowLocks);
// 11. Sync edit log
if (txid != 0) {
syncOrDefer(txid, getEffectiveDurability(processor.useDurability()));
}
walSyncSuccessful = true;
// 12. call postBatchMutate hook
processor.postBatchMutate(this);
}
} finally {
if (!mutations.isEmpty() && !walSyncSuccessful) {
LOG.warn("Wal sync failed. Roll back " + mutations.size() +
" memstore keyvalues for row(s):" + StringUtils.byteToHexString(
processor.getRowsToLock().iterator().next()) + "...");
for (Mutation m : mutations) {
for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance();) {
Cell cell = cellScanner.current();
getStore(cell).rollback(cell);
}
}
}
// 13. Roll mvcc forward
if (writeEntry != null) {
mvcc.completeMemstoreInsertWithSeqNum(writeEntry, walKey);
}
if (locked) {
this.updatesLock.readLock().unlock();
}
// release locks if some were acquired but another timed out
releaseRowLocks(acquiredRowLocks);
}
// 14. Run post-process hook
processor.postProcess(this, walEdit, walSyncSuccessful);
} finally {
closeRegionOperation();
if (!mutations.isEmpty() &&
isFlushSize(this.addAndGetGlobalMemstoreSize(addedSize))) {
requestFlush();
}
}
}
private void doProcessRowWithTimeout(final RowProcessor<?,?> processor,
final long now,
final HRegion region,
final List<Mutation> mutations,
final WALEdit walEdit,
final long timeout) throws IOException {
// Short circuit the no time bound case.
if (timeout < 0) {
try {
processor.process(now, region, mutations, walEdit);
} catch (IOException e) {
LOG.warn("RowProcessor:" + processor.getClass().getName() +
" throws Exception on row(s):" +
Bytes.toStringBinary(
processor.getRowsToLock().iterator().next()) + "...", e);
throw e;
}
return;
}
// Case with time bound
FutureTask<Void> task =
new FutureTask<Void>(new Callable<Void>() {
@Override
public Void call() throws IOException {
try {
processor.process(now, region, mutations, walEdit);
return null;
} catch (IOException e) {
LOG.warn("RowProcessor:" + processor.getClass().getName() +
" throws Exception on row(s):" +
Bytes.toStringBinary(
processor.getRowsToLock().iterator().next()) + "...", e);
throw e;
}
}
});
rowProcessorExecutor.execute(task);
try {
task.get(timeout, TimeUnit.MILLISECONDS);
} catch (TimeoutException te) {
LOG.error("RowProcessor timeout:" + timeout + " ms on row(s):" +
Bytes.toStringBinary(processor.getRowsToLock().iterator().next()) +
"...");
throw new IOException(te);
} catch (Exception e) {
throw new IOException(e);
}
}
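/**
 * Nonce-less variant of {@link #append(Append, long, long)}; uses
 * {@link HConstants#NO_NONCE} for both the nonce group and the nonce.
 */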
public Result append(Append append) throws IOException {
return append(append, HConstants.NO_NONCE, HConstants.NO_NONCE);
}
// TODO: There's a lot of boiler plate code identical to increment.
// We should refactor append and increment as local get-mutate-put
// transactions, so all stores only go through one code path for puts.
@Override
public Result append(Append append, long nonceGroup, long nonce) throws IOException {
byte[] row = append.getRow();
checkRow(row, "append");
boolean flush = false;
Durability durability = getEffectiveDurability(append.getDurability());
boolean writeToWAL = durability != Durability.SKIP_WAL;
WALEdit walEdits = null;
List<Cell> allKVs = new ArrayList<Cell>(append.size());
Map<Store, List<Cell>> tempMemstore = new HashMap<Store, List<Cell>>();
long size = 0;
long txid = 0;
checkReadOnly();
checkResources();
// Lock row
startRegionOperation(Operation.APPEND);
this.writeRequestsCount.increment();
long mvccNum = 0;
WriteEntry w = null;
WALKey walKey = null;
RowLock rowLock = null;
List<Cell> memstoreCells = new ArrayList<Cell>();
boolean doRollBackMemstore = false;
try {
rowLock = getRowLock(row);
try {
lock(this.updatesLock.readLock());
try {
// wait for all prior MVCC transactions to finish - while we hold the row lock
// (so that we are guaranteed to see the latest state)
mvcc.waitForPreviousTransactionsComplete();
if (this.coprocessorHost != null) {
Result r = this.coprocessorHost.preAppendAfterRowLock(append);
if(r!= null) {
return r;
}
}
// now start my own transaction
mvccNum = MultiVersionConsistencyControl.getPreAssignedWriteNumber(this.sequenceId);
w = mvcc.beginMemstoreInsertWithSeqNum(mvccNum);
long now = EnvironmentEdgeManager.currentTime();
// Process each family
for (Map.Entry<byte[], List<Cell>> family : append.getFamilyCellMap().entrySet()) {
Store store = stores.get(family.getKey());
List<Cell> kvs = new ArrayList<Cell>(family.getValue().size());
// Sort the cells so that they match the order that they
// appear in the Get results. Otherwise, we won't be able to
// find the existing values if the cells are not specified
// in order by the client since cells are in an array list.
Collections.sort(family.getValue(), store.getComparator());
// Get previous values for all columns in this family
Get get = new Get(row);
for (Cell cell : family.getValue()) {
get.addColumn(family.getKey(), CellUtil.cloneQualifier(cell));
}
List<Cell> results = get(get, false);
// Iterate the input columns and update existing values if they were
// found, otherwise add new column initialized to the append value
// Avoid as much copying as possible. We may need to rewrite and
// consolidate tags. Bytes are only copied once.
// Would be nice if KeyValue had scatter/gather logic
int idx = 0;
for (Cell cell : family.getValue()) {
Cell newCell;
Cell oldCell = null;
if (idx < results.size()
&& CellUtil.matchingQualifier(results.get(idx), cell)) {
oldCell = results.get(idx);
long ts = Math.max(now, oldCell.getTimestamp());
// Process cell tags
List<Tag> newTags = new ArrayList<Tag>();
// Make a union of the set of tags in the old and new KVs
if (oldCell.getTagsLength() > 0) {
Iterator<Tag> i = CellUtil.tagsIterator(oldCell.getTagsArray(),
oldCell.getTagsOffset(), oldCell.getTagsLength());
while (i.hasNext()) {
newTags.add(i.next());
}
}
if (cell.getTagsLength() > 0) {
Iterator<Tag> i = CellUtil.tagsIterator(cell.getTagsArray(),
cell.getTagsOffset(), cell.getTagsLength());
while (i.hasNext()) {
newTags.add(i.next());
}
}
// Cell TTL handling
if (append.getTTL() != Long.MAX_VALUE) {
// Add the new TTL tag
newTags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(append.getTTL())));
}
// Rebuild tags
byte[] tagBytes = Tag.fromList(newTags);
// allocate an empty cell once
newCell = new KeyValue(row.length, cell.getFamilyLength(),
cell.getQualifierLength(), ts, KeyValue.Type.Put,
oldCell.getValueLength() + cell.getValueLength(),
tagBytes.length);
// copy in row, family, and qualifier
System.arraycopy(cell.getRowArray(), cell.getRowOffset(),
newCell.getRowArray(), newCell.getRowOffset(), cell.getRowLength());
System.arraycopy(cell.getFamilyArray(), cell.getFamilyOffset(),
newCell.getFamilyArray(), newCell.getFamilyOffset(),
cell.getFamilyLength());
System.arraycopy(cell.getQualifierArray(), cell.getQualifierOffset(),
newCell.getQualifierArray(), newCell.getQualifierOffset(),
cell.getQualifierLength());
// copy in the value
System.arraycopy(oldCell.getValueArray(), oldCell.getValueOffset(),
newCell.getValueArray(), newCell.getValueOffset(),
oldCell.getValueLength());
System.arraycopy(cell.getValueArray(), cell.getValueOffset(),
newCell.getValueArray(),
newCell.getValueOffset() + oldCell.getValueLength(),
cell.getValueLength());
// Copy in tag data
System.arraycopy(tagBytes, 0, newCell.getTagsArray(), newCell.getTagsOffset(),
tagBytes.length);
idx++;
} else {
// Append's KeyValue.Type==Put and ts==HConstants.LATEST_TIMESTAMP
CellUtil.updateLatestStamp(cell, now);
// Cell TTL handling
if (append.getTTL() != Long.MAX_VALUE) {
List<Tag> newTags = new ArrayList<Tag>(1);
newTags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(append.getTTL())));
// Add the new TTL tag
newCell = new KeyValue(cell.getRowArray(), cell.getRowOffset(),
cell.getRowLength(),
cell.getFamilyArray(), cell.getFamilyOffset(),
cell.getFamilyLength(),
cell.getQualifierArray(), cell.getQualifierOffset(),
cell.getQualifierLength(),
cell.getTimestamp(), KeyValue.Type.codeToType(cell.getTypeByte()),
cell.getValueArray(), cell.getValueOffset(), cell.getValueLength(),
newTags);
} else {
newCell = cell;
}
}
CellUtil.setSequenceId(newCell, mvccNum);
// Give coprocessors a chance to update the new cell
if (coprocessorHost != null) {
newCell = coprocessorHost.postMutationBeforeWAL(RegionObserver.MutationType.APPEND,
append, oldCell, newCell);
}
kvs.add(newCell);
// Append update to WAL
if (writeToWAL) {
if (walEdits == null) {
walEdits = new WALEdit();
}
walEdits.add(newCell);
}
}
//store the kvs to the temporary memstore before writing WAL
tempMemstore.put(store, kvs);
}
//Actually write to Memstore now
for (Map.Entry<Store, List<Cell>> entry : tempMemstore.entrySet()) {
Store store = entry.getKey();
if (store.getFamily().getMaxVersions() == 1) {
// upsert if VERSIONS for this CF == 1
size += store.upsert(entry.getValue(), getSmallestReadPoint());
memstoreCells.addAll(entry.getValue());
} else {
// otherwise keep older versions around
for (Cell cell: entry.getValue()) {
Pair<Long, Cell> ret = store.add(cell);
size += ret.getFirst();
memstoreCells.add(ret.getSecond());
doRollBackMemstore = true;
}
}
allKVs.addAll(entry.getValue());
}
// Actually write to WAL now
if (writeToWAL) {
// Using default cluster id, as this can only happen in the originating
// cluster. A slave cluster receives the final value (not the delta)
// as a Put.
// we use HLogKey here instead of WALKey directly to support legacy coprocessors.
walKey = new HLogKey(getRegionInfo().getEncodedNameAsBytes(),
this.htableDescriptor.getTableName(), WALKey.NO_SEQUENCE_ID, nonceGroup, nonce);
txid = this.wal.append(this.htableDescriptor, getRegionInfo(), walKey, walEdits,
this.sequenceId, true, memstoreCells);
} else {
recordMutationWithoutWal(append.getFamilyCellMap());
}
if (walKey == null) {
// Append a faked WALEdit in order for SKIP_WAL updates to get mvcc assigned
walKey = this.appendEmptyEdit(this.wal, memstoreCells);
}
size = this.addAndGetGlobalMemstoreSize(size);
flush = isFlushSize(size);
} finally {
this.updatesLock.readLock().unlock();
}
} finally {
rowLock.release();
rowLock = null;
}
// sync the transaction log outside the rowlock
if(txid != 0){
syncOrDefer(txid, durability);
}
doRollBackMemstore = false;
} finally {
if (rowLock != null) {
rowLock.release();
}
// if the wal sync was unsuccessful, remove keys from memstore
if (doRollBackMemstore) {
rollbackMemstore(memstoreCells);
}
if (w != null) {
mvcc.completeMemstoreInsertWithSeqNum(w, walKey);
}
closeRegionOperation(Operation.APPEND);
}
if (this.metricsRegion != null) {
this.metricsRegion.updateAppend();
}
if (flush) {
// Request a cache flush. Do it outside update lock.
requestFlush();
}
return append.isReturnResults() ? Result.create(allKVs) : null;
}
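/**
 * Nonce-less variant of {@link #increment(Increment, long, long)}; uses
 * {@link HConstants#NO_NONCE} for both the nonce group and the nonce.
 */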
public Result increment(Increment increment) throws IOException {
return increment(increment, HConstants.NO_NONCE, HConstants.NO_NONCE);
}
// TODO: There's a lot of boiler plate code identical to append.
// We should refactor append and increment as local get-mutate-put
// transactions, so all stores only go through one code path for puts.
@Override
public Result increment(Increment increment, long nonceGroup, long nonce)
throws IOException {
byte [] row = increment.getRow();
checkRow(row, "increment");
TimeRange tr = increment.getTimeRange();
boolean flush = false;
Durability durability = getEffectiveDurability(increment.getDurability());
boolean writeToWAL = durability != Durability.SKIP_WAL;
WALEdit walEdits = null;
List<Cell> allKVs = new ArrayList<Cell>(increment.size());
Map<Store, List<Cell>> tempMemstore = new HashMap<Store, List<Cell>>();
long size = 0;
long txid = 0;
checkReadOnly();
checkResources();
// Lock row
startRegionOperation(Operation.INCREMENT);
this.writeRequestsCount.increment();
RowLock rowLock = null;
WriteEntry w = null;
WALKey walKey = null;
long mvccNum = 0;
List<Cell> memstoreCells = new ArrayList<Cell>();
boolean doRollBackMemstore = false;
try {
rowLock = getRowLock(row);
try {
lock(this.updatesLock.readLock());
try {
// wait for all prior MVCC transactions to finish - while we hold the row lock
// (so that we are guaranteed to see the latest state)
mvcc.waitForPreviousTransactionsComplete();
if (this.coprocessorHost != null) {
Result r = this.coprocessorHost.preIncrementAfterRowLock(increment);
if (r != null) {
return r;
}
}
// now start my own transaction
mvccNum = MultiVersionConsistencyControl.getPreAssignedWriteNumber(this.sequenceId);
w = mvcc.beginMemstoreInsertWithSeqNum(mvccNum);
long now = EnvironmentEdgeManager.currentTime();
// Process each family
for (Map.Entry<byte [], List<Cell>> family:
increment.getFamilyCellMap().entrySet()) {
Store store = stores.get(family.getKey());
List<Cell> kvs = new ArrayList<Cell>(family.getValue().size());
// Sort the cells so that they match the order that they
// appear in the Get results. Otherwise, we won't be able to
// find the existing values if the cells are not specified
// in order by the client since cells are in an array list.
Collections.sort(family.getValue(), store.getComparator());
// Get previous values for all columns in this family
Get get = new Get(row);
for (Cell cell: family.getValue()) {
get.addColumn(family.getKey(), CellUtil.cloneQualifier(cell));
}
get.setTimeRange(tr.getMin(), tr.getMax());
List<Cell> results = get(get, false);
// Iterate the input columns and update existing values if they were
// found, otherwise add new column initialized to the increment amount
int idx = 0;
List<Cell> edits = family.getValue();
for (int i = 0; i < edits.size(); i++) {
Cell cell = edits.get(i);
long amount = Bytes.toLong(CellUtil.cloneValue(cell));
boolean noWriteBack = (amount == 0);
List<Tag> newTags = new ArrayList<Tag>();
// Carry forward any tags that might have been added by a coprocessor
if (cell.getTagsLength() > 0) {
Iterator<Tag> itr = CellUtil.tagsIterator(cell.getTagsArray(),
cell.getTagsOffset(), cell.getTagsLength());
while (itr.hasNext()) {
newTags.add(itr.next());
}
}
Cell c = null;
long ts = now;
if (idx < results.size() && CellUtil.matchingQualifier(results.get(idx), cell)) {
c = results.get(idx);
ts = Math.max(now, c.getTimestamp());
if(c.getValueLength() == Bytes.SIZEOF_LONG) {
amount += Bytes.toLong(c.getValueArray(), c.getValueOffset(), Bytes.SIZEOF_LONG);
} else {
// throw DoNotRetryIOException instead of IllegalArgumentException
throw new org.apache.hadoop.hbase.DoNotRetryIOException(
"Attempted to increment field that isn't 64 bits wide");
}
// Carry tags forward from previous version
if (c.getTagsLength() > 0) {
Iterator<Tag> itr = CellUtil.tagsIterator(c.getTagsArray(),
c.getTagsOffset(), c.getTagsLength());
while (itr.hasNext()) {
newTags.add(itr.next());
}
}
if (i < ( edits.size() - 1) && !CellUtil.matchingQualifier(cell, edits.get(i + 1)))
idx++;
}
// Append new incremented KeyValue to list
byte[] q = CellUtil.cloneQualifier(cell);
byte[] val = Bytes.toBytes(amount);
// Add the TTL tag if the mutation carried one
if (increment.getTTL() != Long.MAX_VALUE) {
newTags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(increment.getTTL())));
}
Cell newKV = new KeyValue(row, 0, row.length,
family.getKey(), 0, family.getKey().length,
q, 0, q.length,
ts,
KeyValue.Type.Put,
val, 0, val.length,
newTags);
CellUtil.setSequenceId(newKV, mvccNum);
// Give coprocessors a chance to update the new cell
if (coprocessorHost != null) {
newKV = coprocessorHost.postMutationBeforeWAL(
RegionObserver.MutationType.INCREMENT, increment, c, newKV);
}
allKVs.add(newKV);
if (!noWriteBack) {
kvs.add(newKV);
// Prepare WAL updates
if (writeToWAL) {
if (walEdits == null) {
walEdits = new WALEdit();
}
walEdits.add(newKV);
}
}
}
//store the kvs to the temporary memstore before writing WAL
if (!kvs.isEmpty()) {
tempMemstore.put(store, kvs);
}
}
//Actually write to Memstore now
if (!tempMemstore.isEmpty()) {
for (Map.Entry<Store, List<Cell>> entry : tempMemstore.entrySet()) {
Store store = entry.getKey();
if (store.getFamily().getMaxVersions() == 1) {
// upsert if VERSIONS for this CF == 1
size += store.upsert(entry.getValue(), getSmallestReadPoint());
memstoreCells.addAll(entry.getValue());
} else {
// otherwise keep older versions around
for (Cell cell : entry.getValue()) {
Pair<Long, Cell> ret = store.add(cell);
size += ret.getFirst();
memstoreCells.add(ret.getSecond());
doRollBackMemstore = true;
}
}
}
size = this.addAndGetGlobalMemstoreSize(size);
flush = isFlushSize(size);
}
// Actually write to WAL now
if (walEdits != null && !walEdits.isEmpty()) {
if (writeToWAL) {
// Using default cluster id, as this can only happen in the originating
// cluster. A slave cluster receives the final value (not the delta)
// as a Put.
// we use HLogKey here instead of WALKey directly to support legacy coprocessors.
walKey = new HLogKey(this.getRegionInfo().getEncodedNameAsBytes(),
this.htableDescriptor.getTableName(), WALKey.NO_SEQUENCE_ID, nonceGroup, nonce);
txid = this.wal.append(this.htableDescriptor, this.getRegionInfo(),
walKey, walEdits, getSequenceId(), true, memstoreCells);
} else {
recordMutationWithoutWal(increment.getFamilyCellMap());
}
}
if(walKey == null){
// Append a faked WALEdit in order for SKIP_WAL updates to get mvccNum assigned
walKey = this.appendEmptyEdit(this.wal, memstoreCells);
}
} finally {
this.updatesLock.readLock().unlock();
}
} finally {
rowLock.release();
rowLock = null;
}
// sync the transaction log outside the rowlock
if(txid != 0){
syncOrDefer(txid, durability);
}
doRollBackMemstore = false;
} finally {
if (rowLock != null) {
rowLock.release();
}
// if the wal sync was unsuccessful, remove keys from memstore
if (doRollBackMemstore) {
rollbackMemstore(memstoreCells);
}
if (w != null) {
mvcc.completeMemstoreInsertWithSeqNum(w, walKey);
}
closeRegionOperation(Operation.INCREMENT);
if (this.metricsRegion != null) {
this.metricsRegion.updateIncrement();
}
}
if (flush) {
// Request a cache flush. Do it outside update lock.
requestFlush();
}
return increment.isReturnResults() ? Result.create(allKVs) : null;
}
//
// New HBASE-880 Helpers
//
private void checkFamily(final byte [] family)
throws NoSuchColumnFamilyException {
if (!this.htableDescriptor.hasFamily(family)) {
throw new NoSuchColumnFamilyException("Column family " +
Bytes.toString(family) + " does not exist in region " + this
+ " in table " + this.htableDescriptor);
}
}
public static final long FIXED_OVERHEAD = ClassSize.align(
ClassSize.OBJECT +
ClassSize.ARRAY +
45 * ClassSize.REFERENCE + 2 * Bytes.SIZEOF_INT +
(14 * Bytes.SIZEOF_LONG) +
5 * Bytes.SIZEOF_BOOLEAN);
// woefully out of date - currently missing:
// 1 x HashMap - coprocessorServiceHandlers
// 6 x Counter - numMutationsWithoutWAL, dataInMemoryWithoutWAL,
// checkAndMutateChecksPassed, checkAndMutateChecksFailed, readRequestsCount,
// writeRequestsCount
// 1 x HRegion$WriteState - writestate
// 1 x RegionCoprocessorHost - coprocessorHost
// 1 x RegionSplitPolicy - splitPolicy
// 1 x MetricsRegion - metricsRegion
// 1 x MetricsRegionWrapperImpl - metricsRegionWrapper
public static final long DEEP_OVERHEAD = FIXED_OVERHEAD +
ClassSize.OBJECT + // closeLock
(2 * ClassSize.ATOMIC_BOOLEAN) + // closed, closing
(3 * ClassSize.ATOMIC_LONG) + // memStoreSize, numPutsWithoutWAL, dataInMemoryWithoutWAL
(2 * ClassSize.CONCURRENT_HASHMAP) + // lockedRows, scannerReadPoints
WriteState.HEAP_SIZE + // writestate
ClassSize.CONCURRENT_SKIPLISTMAP + ClassSize.CONCURRENT_SKIPLISTMAP_ENTRY + // stores
(2 * ClassSize.REENTRANT_LOCK) + // lock, updatesLock
MultiVersionConsistencyControl.FIXED_SIZE // mvcc
+ ClassSize.TREEMAP // maxSeqIdInStores
+ 2 * ClassSize.ATOMIC_INTEGER // majorInProgress, minorInProgress
;
@Override
public long heapSize() {
long heapSize = DEEP_OVERHEAD;
for (Store store : this.stores.values()) {
heapSize += store.heapSize();
}
// this does not take into account row locks, recent flushes, mvcc entries, and more
return heapSize;
}
/*
* This method calls System.exit.
* @param message Message to print out. May be null.
*/
private static void printUsageAndExit(final String message) {
if (message != null && message.length() > 0) System.out.println(message);
System.out.println("Usage: HRegion CATALOG_TABLE_DIR [major_compact]");
System.out.println("Options:");
System.out.println(" major_compact Pass this option to major compact " +
"passed region.");
System.out.println("Default outputs scan of passed region.");
System.exit(1);
}
@Override
public boolean registerService(Service instance) {
/*
* No stacking of instances is allowed for a single service name
*/
Descriptors.ServiceDescriptor serviceDesc = instance.getDescriptorForType();
if (coprocessorServiceHandlers.containsKey(serviceDesc.getFullName())) {
LOG.error("Coprocessor service "+serviceDesc.getFullName()+
" already registered, rejecting request from "+instance
);
return false;
}
coprocessorServiceHandlers.put(serviceDesc.getFullName(), instance);
if (LOG.isDebugEnabled()) {
LOG.debug("Registered coprocessor service: region=" +
Bytes.toStringBinary(getRegionInfo().getRegionName()) +
" service=" + serviceDesc.getFullName());
}
return true;
}
@Override
public Message execService(RpcController controller, CoprocessorServiceCall call)
throws IOException {
String serviceName = call.getServiceName();
String methodName = call.getMethodName();
if (!coprocessorServiceHandlers.containsKey(serviceName)) {
throw new UnknownProtocolException(null,
"No registered coprocessor service found for name "+serviceName+
" in region "+Bytes.toStringBinary(getRegionInfo().getRegionName()));
}
Service service = coprocessorServiceHandlers.get(serviceName);
Descriptors.ServiceDescriptor serviceDesc = service.getDescriptorForType();
Descriptors.MethodDescriptor methodDesc = serviceDesc.findMethodByName(methodName);
if (methodDesc == null) {
throw new UnknownProtocolException(service.getClass(),
"Unknown method "+methodName+" called on service "+serviceName+
" in region "+Bytes.toStringBinary(getRegionInfo().getRegionName()));
}
Message request = service.getRequestPrototype(methodDesc).newBuilderForType()
.mergeFrom(call.getRequest()).build();
if (coprocessorHost != null) {
request = coprocessorHost.preEndpointInvocation(service, methodName, request);
}
final Message.Builder responseBuilder =
service.getResponsePrototype(methodDesc).newBuilderForType();
service.callMethod(methodDesc, controller, request, new RpcCallback<Message>() {
@Override
public void run(Message message) {
if (message != null) {
responseBuilder.mergeFrom(message);
}
}
});
if (coprocessorHost != null) {
coprocessorHost.postEndpointInvocation(service, methodName, request, responseBuilder);
}
return responseBuilder.build();
}
/*
* Process table.
* Do major compaction or list content.
* @throws IOException
*/
private static void processTable(final FileSystem fs, final Path p,
final WALFactory walFactory, final Configuration c,
final boolean majorCompact)
throws IOException {
HRegion region;
FSTableDescriptors fst = new FSTableDescriptors(c);
// Currently expects tables have one region only.
if (FSUtils.getTableName(p).equals(TableName.META_TABLE_NAME)) {
final WAL wal = walFactory.getMetaWAL(
HRegionInfo.FIRST_META_REGIONINFO.getEncodedNameAsBytes());
region = HRegion.newHRegion(p, wal, fs, c,
HRegionInfo.FIRST_META_REGIONINFO, fst.get(TableName.META_TABLE_NAME), null);
} else {
throw new IOException("Not a known catalog table: " + p.toString());
}
try {
region.initialize(null);
if (majorCompact) {
region.compact(true);
} else {
// Default behavior
Scan scan = new Scan();
// scan.addFamily(HConstants.CATALOG_FAMILY);
RegionScanner scanner = region.getScanner(scan);
try {
List<Cell> kvs = new ArrayList<Cell>();
boolean done;
do {
kvs.clear();
done = scanner.next(kvs);
if (kvs.size() > 0) LOG.info(kvs);
} while (done);
} finally {
scanner.close();
}
}
} finally {
region.close();
}
}
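/** @return true if a split has been explicitly requested for this region */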
boolean shouldForceSplit() {
return this.splitRequest;
}
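/** @return the split point explicitly requested via {@link #forceSplit(byte[])}, if any */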
byte[] getExplicitSplitPoint() {
return this.explicitSplitPoint;
}
void forceSplit(byte[] sp) {
// This HRegion will go away after the forced split is successful
// But if a forced split fails, we need to clear forced split.
this.splitRequest = true;
if (sp != null) {
this.explicitSplitPoint = sp;
}
}
void clearSplit() {
this.splitRequest = false;
this.explicitSplitPoint = null;
}
/**
* Give the region a chance to prepare before it is split.
*/
protected void prepareToSplit() {
// nothing
}
/**
* Return the split point. A null return value indicates the region isn't splittable.
* If the split point isn't explicitly specified, this method goes over the stores
* to find the best split point. Currently the criterion for the best split point
* is the size of the store.
*/
public byte[] checkSplit() {
// Can't split META
if (this.getRegionInfo().isMetaTable() ||
TableName.NAMESPACE_TABLE_NAME.equals(this.getRegionInfo().getTable())) {
if (shouldForceSplit()) {
LOG.warn("Cannot split meta region in HBase 0.20 and above");
}
return null;
}
// Can't split region which is in recovering state
if (this.isRecovering()) {
LOG.info("Cannot split region " + this.getRegionInfo().getEncodedName() + " in recovery.");
return null;
}
if (!splitPolicy.shouldSplit()) {
return null;
}
byte[] ret = splitPolicy.getSplitPoint();
if (ret != null) {
try {
checkRow(ret, "calculated split");
} catch (IOException e) {
LOG.error("Ignoring invalid split", e);
return null;
}
}
return ret;
}
/**
* @return The priority that this region should have in the compaction queue
*/
public int getCompactPriority() {
int count = Integer.MAX_VALUE;
for (Store store : stores.values()) {
count = Math.min(count, store.getCompactPriority());
}
return count;
}
/** @return the coprocessor host */
public RegionCoprocessorHost getCoprocessorHost() {
return coprocessorHost;
}
/** @param coprocessorHost the new coprocessor host */
public void setCoprocessorHost(final RegionCoprocessorHost coprocessorHost) {
this.coprocessorHost = coprocessorHost;
}
@Override
public void startRegionOperation() throws IOException {
startRegionOperation(Operation.ANY);
}
@Override
public void startRegionOperation(Operation op) throws IOException {
switch (op) {
case GET: // read operations
case SCAN:
checkReadsEnabled();
case INCREMENT: // write operations
case APPEND:
case SPLIT_REGION:
case MERGE_REGION:
case PUT:
case DELETE:
case BATCH_MUTATE:
case COMPACT_REGION:
// when a region is in recovering state, no read, split or merge is allowed
if (isRecovering() && (this.disallowWritesInRecovering ||
(op != Operation.PUT && op != Operation.DELETE && op != Operation.BATCH_MUTATE))) {
throw new RegionInRecoveryException(getRegionInfo().getRegionNameAsString() +
" is recovering; cannot take reads");
}
break;
default:
break;
}
if (op == Operation.MERGE_REGION || op == Operation.SPLIT_REGION
|| op == Operation.COMPACT_REGION) {
// split, merge or compact region doesn't need to check the closing/closed state or lock the
// region
return;
}
if (this.closing.get()) {
throw new NotServingRegionException(getRegionInfo().getRegionNameAsString() + " is closing");
}
lock(lock.readLock());
if (this.closed.get()) {
lock.readLock().unlock();
throw new NotServingRegionException(getRegionInfo().getRegionNameAsString() + " is closed");
}
try {
if (coprocessorHost != null) {
coprocessorHost.postStartRegionOperation(op);
}
} catch (Exception e) {
lock.readLock().unlock();
throw new IOException(e);
}
}
@Override
public void closeRegionOperation() throws IOException {
closeRegionOperation(Operation.ANY);
}
/**
* Closes the lock. This needs to be called in the finally block corresponding
* to the try block of {@link #startRegionOperation(Operation)}
* @throws IOException
*/
public void closeRegionOperation(Operation operation) throws IOException {
lock.readLock().unlock();
if (coprocessorHost != null) {
coprocessorHost.postCloseRegionOperation(operation);
}
}
/**
* This method needs to be called before any public call that reads or
* modifies stores in bulk. It has to be called just before a try.
* #closeBulkRegionOperation needs to be called in the try's finally block.
* Acquires a writelock and checks if the region is closing or closed.
* @throws NotServingRegionException when the region is closing or closed
* @throws RegionTooBusyException if failed to get the lock in time
* @throws InterruptedIOException if interrupted while waiting for a lock
*/
private void startBulkRegionOperation(boolean writeLockNeeded)
throws NotServingRegionException, RegionTooBusyException, InterruptedIOException {
if (this.closing.get()) {
throw new NotServingRegionException(getRegionInfo().getRegionNameAsString() + " is closing");
}
if (writeLockNeeded) lock(lock.writeLock());
else lock(lock.readLock());
if (this.closed.get()) {
if (writeLockNeeded) lock.writeLock().unlock();
else lock.readLock().unlock();
throw new NotServingRegionException(getRegionInfo().getRegionNameAsString() + " is closed");
}
}
/**
* Closes the lock. This needs to be called in the finally block corresponding
* to the try block of #startBulkRegionOperation
*/
private void closeBulkRegionOperation(){
if (lock.writeLock().isHeldByCurrentThread()) lock.writeLock().unlock();
else lock.readLock().unlock();
}
/**
* Update counters for the number of mutations without WAL and the size of possible data loss.
* This information is exposed by the region server metrics.
*/
private void recordMutationWithoutWal(final Map<byte [], List<Cell>> familyMap) {
numMutationsWithoutWAL.increment();
if (numMutationsWithoutWAL.get() <= 1) {
LOG.info("writing data to region " + this +
" with WAL disabled. Data may be lost in the event of a crash.");
}
long mutationSize = 0;
for (List<Cell> cells: familyMap.values()) {
assert cells instanceof RandomAccess;
int listSize = cells.size();
for (int i=0; i < listSize; i++) {
Cell cell = cells.get(i);
// TODO we need include tags length also here.
mutationSize += KeyValueUtil.keyLength(cell) + cell.getValueLength();
}
}
dataInMemoryWithoutWAL.add(mutationSize);
}
private void lock(final Lock lock)
throws RegionTooBusyException, InterruptedIOException {
lock(lock, 1);
}
/**
* Try to acquire a lock. Throw RegionTooBusyException
* if failed to get the lock in time. Throw InterruptedIOException
* if interrupted while waiting for the lock.
*/
private void lock(final Lock lock, final int multiplier)
throws RegionTooBusyException, InterruptedIOException {
try {
final long waitTime = Math.min(maxBusyWaitDuration,
busyWaitDuration * Math.min(multiplier, maxBusyWaitMultiplier));
if (!lock.tryLock(waitTime, TimeUnit.MILLISECONDS)) {
throw new RegionTooBusyException(
"failed to get a lock in " + waitTime + " ms. " +
"regionName=" + (this.getRegionInfo() == null ? "unknown" :
this.getRegionInfo().getRegionNameAsString()) +
", server=" + (this.getRegionServerServices() == null ? "unknown" :
this.getRegionServerServices().getServerName()));
}
} catch (InterruptedException ie) {
LOG.info("Interrupted while waiting for a lock");
InterruptedIOException iie = new InterruptedIOException();
iie.initCause(ie);
throw iie;
}
}
/**
* Calls sync with the given transaction ID if the region's table is not
* deferring it.
* @param txid should sync up to which transaction
* @throws IOException If anything goes wrong with DFS
*/
private void syncOrDefer(long txid, Durability durability) throws IOException {
if (this.getRegionInfo().isMetaRegion()) {
this.wal.sync(txid);
} else {
switch(durability) {
case USE_DEFAULT:
// do what table defaults to
if (shouldSyncWAL()) {
this.wal.sync(txid);
}
break;
case SKIP_WAL:
// nothing to do
break;
case ASYNC_WAL:
// nothing to do
break;
case SYNC_WAL:
case FSYNC_WAL:
// sync the WAL edit (SYNC and FSYNC treated the same for now)
this.wal.sync(txid);
break;
}
}
}
/**
* Check whether we should sync the wal from the table's durability settings
*/
private boolean shouldSyncWAL() {
return durability.ordinal() > Durability.ASYNC_WAL.ordinal();
}
/**
* A mocked list implementation - discards all updates.
*/
private static final List<Cell> MOCKED_LIST = new AbstractList<Cell>() {
@Override
public void add(int index, Cell element) {
// do nothing
}
@Override
public boolean addAll(int index, Collection<? extends Cell> c) {
return false; // this list is never changed as a result of an update
}
@Override
public KeyValue get(int index) {
throw new UnsupportedOperationException();
}
@Override
public int size() {
return 0;
}
};
/**
* Facility for dumping and compacting catalog tables.
* Only handles catalog tables since these are the only tables whose schema
* we know for sure. For usage run:
* <pre>
* ./bin/hbase org.apache.hadoop.hbase.regionserver.HRegion
* </pre>
* @throws IOException
*/
public static void main(String[] args) throws IOException {
if (args.length < 1) {
printUsageAndExit(null);
}
boolean majorCompact = false;
if (args.length > 1) {
if (!args[1].toLowerCase().startsWith("major")) {
printUsageAndExit("ERROR: Unrecognized option <" + args[1] + ">");
}
majorCompact = true;
}
final Path tableDir = new Path(args[0]);
final Configuration c = HBaseConfiguration.create();
final FileSystem fs = FileSystem.get(c);
final Path logdir = new Path(c.get("hbase.tmp.dir"));
final String logname = "wal" + FSUtils.getTableName(tableDir) + System.currentTimeMillis();
final Configuration walConf = new Configuration(c);
FSUtils.setRootDir(walConf, logdir);
final WALFactory wals = new WALFactory(walConf, null, logname);
try {
processTable(fs, tableDir, wals, c, majorCompact);
} finally {
wals.close();
// TODO: is this still right?
BlockCache bc = new CacheConfig(c).getBlockCache();
if (bc != null) bc.shutdown();
}
}
@Override
public long getOpenSeqNum() {
return this.openSeqNum;
}
@Override
public Map<byte[], Long> getMaxStoreSeqId() {
return this.maxSeqIdInStores;
}
@Override
public long getOldestSeqIdOfStore(byte[] familyName) {
return wal.getEarliestMemstoreSeqNum(getRegionInfo()
.getEncodedNameAsBytes(), familyName);
}
@Override
public CompactionState getCompactionState() {
boolean hasMajor = majorInProgress.get() > 0, hasMinor = minorInProgress.get() > 0;
return (hasMajor ? (hasMinor ? CompactionState.MAJOR_AND_MINOR : CompactionState.MAJOR)
: (hasMinor ? CompactionState.MINOR : CompactionState.NONE));
}
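/** Records that a compaction request, major or minor, has started on this region. */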
public void reportCompactionRequestStart(boolean isMajor){
(isMajor ? majorInProgress : minorInProgress).incrementAndGet();
}
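/**
 * Records completion of a compaction request, major or minor, and updates the region's
 * compaction metrics.
 */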
public void reportCompactionRequestEnd(boolean isMajor, int numFiles, long filesSizeCompacted) {
int newValue = (isMajor ? majorInProgress : minorInProgress).decrementAndGet();
// metrics
compactionsFinished.incrementAndGet();
compactionNumFilesCompacted.addAndGet(numFiles);
compactionNumBytesCompacted.addAndGet(filesSizeCompacted);
assert newValue >= 0;
}
/**
* Do not change this sequence id. See {@link #sequenceId} comment.
* @return sequenceId
*/
@VisibleForTesting
public AtomicLong getSequenceId() {
return this.sequenceId;
}
/**
* sets this region's sequenceId.
* @param value new value
*/
private void setSequenceId(long value) {
this.sequenceId.set(value);
}
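/**
 * Per-row lock bookkeeping: tracks the owning thread, a reentrancy count and a latch that
 * other threads block on until all locks held by the owner are released.
 */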
@VisibleForTesting class RowLockContext {
private final HashedBytes row;
private final CountDownLatch latch = new CountDownLatch(1);
private final Thread thread;
private int lockCount = 0;
RowLockContext(HashedBytes row) {
this.row = row;
this.thread = Thread.currentThread();
}
boolean ownedByCurrentThread() {
return thread == Thread.currentThread();
}
RowLock newLock() {
lockCount++;
RowLockImpl rl = new RowLockImpl();
rl.setContext(this);
return rl;
}
void releaseLock() {
if (!ownedByCurrentThread()) {
throw new IllegalArgumentException("Lock held by thread: " + thread
+ " cannot be released by different thread: " + Thread.currentThread());
}
lockCount--;
if (lockCount == 0) {
// no remaining locks by the thread, unlock and allow other threads to access
RowLockContext existingContext = lockedRows.remove(row);
if (existingContext != this) {
throw new RuntimeException(
"Internal row lock state inconsistent, should not happen, row: " + row);
}
latch.countDown();
}
}
}
public static class RowLockImpl implements RowLock {
private RowLockContext context;
private boolean released = false;
@VisibleForTesting
public RowLockContext getContext() {
return context;
}
@VisibleForTesting
public void setContext(RowLockContext context) {
this.context = context;
}
@Override
public void release() {
if (!released) {
context.releaseLock();
}
released = true;
}
}
/**
* Append a faked WALEdit in order to get a long sequence number; the WAL syncer will just
* ignore the WALEdit append later.
* @param wal the WAL to append the empty edit to
* @param cells list of Cells inserted into memstore. Those Cells are passed in order to
* be updated with the right mvcc values (their wal sequence number)
* @return the key used for the append, which is issued with no sync and an empty edit
* @throws IOException
*/
private WALKey appendEmptyEdit(final WAL wal, List<Cell> cells) throws IOException {
// we use HLogKey here instead of WALKey directly to support legacy coprocessors.
WALKey key = new HLogKey(getRegionInfo().getEncodedNameAsBytes(), getRegionInfo().getTable(),
WALKey.NO_SEQUENCE_ID, 0, null, HConstants.NO_NONCE, HConstants.NO_NONCE);
// Call append but with an empty WALEdit. The returned sequence id will not be associated
// with any edit and we can be sure it went in after all outstanding appends.
wal.append(getTableDesc(), getRegionInfo(), key,
WALEdit.EMPTY_WALEDIT, this.sequenceId, false, cells);
return key;
}
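// Illustrative consumer sketch (an assumption, not taken from this file): callers
// typically use the returned key to learn the sequence id the WAL eventually assigns.
// WALKey#getSequenceId() is assumed here to block until that id is available.
//
//   WALKey key = appendEmptyEdit(wal, cells);
//   long assignedSeqId = key.getSequenceId(); // then stamp the cells' mvcc with it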
/**
* {@inheritDoc}
*/
@Override
public void onConfigurationChange(Configuration conf) {
// Do nothing for now.
}
/**
* {@inheritDoc}
*/
@Override
public void registerChildren(ConfigurationManager manager) {
configurationManager = Optional.of(manager);
for (Store s : this.stores.values()) {
configurationManager.get().registerObserver(s);
}
}
/**
* {@inheritDoc}
*/
@Override
public void deregisterChildren(ConfigurationManager manager) {
for (Store s : this.stores.values()) {
configurationManager.get().deregisterObserver(s);
}
}
/**
* @return split policy for this region.
*/
public RegionSplitPolicy getSplitPolicy() {
return this.splitPolicy;
}
}
| hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import java.io.EOFException;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Constructor;
import java.text.ParseException;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.RandomAccess;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.FutureTask;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompoundConfiguration;
import org.apache.hadoop.hbase.DroppedSnapshotException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.RegionTooBusyException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.hadoop.hbase.backup.HFileArchiver;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.IsolationLevel;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.conf.ConfigurationManager;
import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
import org.apache.hadoop.hbase.exceptions.RegionInRecoveryException;
import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FilterWrapper;
import org.apache.hadoop.hbase.filter.IncompatibleFilterException;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;
import org.apache.hadoop.hbase.ipc.RpcCallContext;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall;
import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionLoad;
import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.FlushAction;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.RegionEventDescriptor.EventType;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.StoreDescriptor;
import org.apache.hadoop.hbase.regionserver.MultiVersionConsistencyControl.WriteEntry;
import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;
import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionThroughputController;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionThroughputControllerFactory;
import org.apache.hadoop.hbase.regionserver.compactions.NoLimitCompactionThroughputController;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.regionserver.wal.MetricsWAL;
import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
import org.apache.hadoop.hbase.regionserver.wal.ReplayHLogKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.regionserver.wal.WALUtil;
import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.SnapshotManifest;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CancelableProgressable;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.CompressionTest;
import org.apache.hadoop.hbase.util.Counter;
import org.apache.hadoop.hbase.util.EncryptionTest;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HashedBytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.wal.WALSplitter;
import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;
import org.apache.hadoop.io.MultipleIOException;
import org.apache.hadoop.util.StringUtils;
import org.apache.htrace.Trace;
import org.apache.htrace.TraceScope;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.io.Closeables;
import com.google.protobuf.ByteString;
import com.google.protobuf.Descriptors;
import com.google.protobuf.Message;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
import com.google.protobuf.Service;
import com.google.protobuf.TextFormat;
@InterfaceAudience.Private
public class HRegion implements HeapSize, PropagatingConfigurationObserver, Region {
public static final Log LOG = LogFactory.getLog(HRegion.class);
public static final String LOAD_CFS_ON_DEMAND_CONFIG_KEY =
"hbase.hregion.scan.loadColumnFamiliesOnDemand";
/**
* This is the global default value for durability. All tables/mutations not
* defining a durability or using USE_DEFAULT will default to this value.
*/
private static final Durability DEFAULT_DURABILITY = Durability.SYNC_WAL;
final AtomicBoolean closed = new AtomicBoolean(false);
/* Closing can take some time; use the closing flag if there is stuff we don't
* want to do while in closing state; e.g. offering this region up to the
* master as a region to close if the carrying regionserver is overloaded.
* Once set, it is never cleared.
*/
final AtomicBoolean closing = new AtomicBoolean(false);
/**
* The max sequence id of flushed data on this region. Used when doing some rough
* calculations on whether it is time to flush or not.
*/
private volatile long maxFlushedSeqId = HConstants.NO_SEQNUM;
/**
* Record the sequence id of last flush operation.
*/
private volatile long lastFlushOpSeqId = HConstants.NO_SEQNUM;
/**
* Region scoped edit sequence Id. Edits to this region are GUARANTEED to appear in the WAL
* file in this sequence id's order; i.e. edit #2 will be in the WAL after edit #1.
* Its default value is -1L. This default is used as a marker to indicate
* that the region hasn't opened yet. Once it is opened, it is set to the derived
* {@link #openSeqNum}, the largest sequence id of all hfiles opened under this Region.
*
* <p>Control of this sequence is handed off to the WAL implementation. It is responsible
* for tagging edits with the correct sequence id since it is responsible for getting the
* edits into the WAL files. It controls updating the sequence id value. DO NOT UPDATE IT
* OUTSIDE OF THE WAL. The value you get will not be what you think it is.
*/
private final AtomicLong sequenceId = new AtomicLong(-1L);
/**
* The sequence id of the last replayed open region event from the primary region. This is used
* to skip entries before this due to the possibility of replay edits coming out of order from
* replication.
*/
protected volatile long lastReplayedOpenRegionSeqId = -1L;
protected volatile long lastReplayedCompactionSeqId = -1L;
//////////////////////////////////////////////////////////////////////////////
// Members
//////////////////////////////////////////////////////////////////////////////
// map from a locked row to the context for that lock including:
// - CountDownLatch for threads waiting on that row
// - the thread that owns the lock (allow reentrancy)
// - reference count of (reentrant) locks held by the thread
// - the row itself
private final ConcurrentHashMap<HashedBytes, RowLockContext> lockedRows =
new ConcurrentHashMap<HashedBytes, RowLockContext>();
protected final Map<byte[], Store> stores = new ConcurrentSkipListMap<byte[], Store>(
Bytes.BYTES_RAWCOMPARATOR);
// TODO: account for each registered handler in HeapSize computation
private Map<String, Service> coprocessorServiceHandlers = Maps.newHashMap();
public final AtomicLong memstoreSize = new AtomicLong(0);
// Debug possible data loss due to WAL off
final Counter numMutationsWithoutWAL = new Counter();
final Counter dataInMemoryWithoutWAL = new Counter();
// Debug why CAS operations are taking a while.
final Counter checkAndMutateChecksPassed = new Counter();
final Counter checkAndMutateChecksFailed = new Counter();
// Number of requests
final Counter readRequestsCount = new Counter();
final Counter writeRequestsCount = new Counter();
// Number of requests blocked by memstore size.
private final Counter blockedRequestsCount = new Counter();
// Compaction counters
final AtomicLong compactionsFinished = new AtomicLong(0L);
final AtomicLong compactionNumFilesCompacted = new AtomicLong(0L);
final AtomicLong compactionNumBytesCompacted = new AtomicLong(0L);
private final WAL wal;
private final HRegionFileSystem fs;
protected final Configuration conf;
private final Configuration baseConf;
private final KeyValue.KVComparator comparator;
private final int rowLockWaitDuration;
static final int DEFAULT_ROWLOCK_WAIT_DURATION = 30000;
// The internal wait duration to acquire a lock before read/update
// from the region. It is not per row. The purpose of this wait time
// is to avoid waiting a long time while the region is busy, so that
// we can release the IPC handler soon enough to improve the
// availability of the region server. It can be adjusted by
// tuning configuration "hbase.busy.wait.duration".
final long busyWaitDuration;
static final long DEFAULT_BUSY_WAIT_DURATION = HConstants.DEFAULT_HBASE_RPC_TIMEOUT;
// If updating multiple rows in one call, wait longer,
// i.e. waiting for busyWaitDuration * # of rows. However,
// we can limit the max multiplier.
final int maxBusyWaitMultiplier;
// Max busy wait duration. There is no point in waiting longer than the RPC
// purge timeout, at which point the RPC call will be terminated by the RPC engine.
final long maxBusyWaitDuration;
// negative number indicates infinite timeout
static final long DEFAULT_ROW_PROCESSOR_TIMEOUT = 60 * 1000L;
final ExecutorService rowProcessorExecutor = Executors.newCachedThreadPool();
private final ConcurrentHashMap<RegionScanner, Long> scannerReadPoints;
/**
* The sequence ID that was encountered when this region was opened.
*/
private long openSeqNum = HConstants.NO_SEQNUM;
/**
* The default setting for whether to enable on-demand CF loading for
* scan requests to this region. Requests can override it.
*/
private boolean isLoadingCfsOnDemandDefault = false;
private final AtomicInteger majorInProgress = new AtomicInteger(0);
private final AtomicInteger minorInProgress = new AtomicInteger(0);
//
// Context: During replay we want to ensure that we do not lose any data. So, we
// have to be conservative in how we replay wals. For each store, we calculate
// the maxSeqId up to which the store was flushed. And, skip the edits which
// are equal to or lower than maxSeqId for each store.
// The following map is populated when opening the region
Map<byte[], Long> maxSeqIdInStores = new TreeMap<byte[], Long>(Bytes.BYTES_COMPARATOR);
/** Saved state from replaying prepare flush cache */
private PrepareFlushResult prepareFlushResult = null;
/**
* Config setting for whether to allow writes when a region is in recovering or not.
*/
private boolean disallowWritesInRecovering = false;
// when a region is in recovering state, it can only accept writes, not reads
private volatile boolean isRecovering = false;
private volatile Optional<ConfigurationManager> configurationManager;
/**
* @return The smallest mvcc readPoint across all the scanners in this
* region. Writes older than this readPoint, are included in every
* read operation.
*/
public long getSmallestReadPoint() {
long minimumReadPoint;
// We need to ensure that while we are calculating the smallestReadPoint
// no new RegionScanners can grab a readPoint that we are unaware of.
// We achieve this by synchronizing on the scannerReadPoints object.
synchronized(scannerReadPoints) {
minimumReadPoint = mvcc.memstoreReadPoint();
for (Long readPoint: this.scannerReadPoints.values()) {
if (readPoint < minimumReadPoint) {
minimumReadPoint = readPoint;
}
}
}
return minimumReadPoint;
}
/*
* Data structure of write state flags used coordinating flushes,
* compactions and closes.
*/
static class WriteState {
// Set while a memstore flush is happening.
volatile boolean flushing = false;
// Set when a flush has been requested.
volatile boolean flushRequested = false;
// Number of compactions running.
volatile int compacting = 0;
// Gets set in close. If set, cannot compact or flush again.
volatile boolean writesEnabled = true;
// Set if region is read-only
volatile boolean readOnly = false;
// whether the reads are enabled. This is different than readOnly, because readOnly is
// static in the lifetime of the region, while readsEnabled is dynamic
volatile boolean readsEnabled = true;
/**
* Set flags that make this region read-only.
*
* @param onOff flip value for region r/o setting
*/
synchronized void setReadOnly(final boolean onOff) {
this.writesEnabled = !onOff;
this.readOnly = onOff;
}
boolean isReadOnly() {
return this.readOnly;
}
boolean isFlushRequested() {
return this.flushRequested;
}
void setReadsEnabled(boolean readsEnabled) {
this.readsEnabled = readsEnabled;
}
static final long HEAP_SIZE = ClassSize.align(
ClassSize.OBJECT + 5 * Bytes.SIZEOF_BOOLEAN);
}
/**
* Objects from this class are created when flushing to describe all the different states that
* that method ends up in. The Result enum describes those states. The sequence id should only
* be specified if the flush was successful, and the failure message should only be specified
* if it didn't flush.
*/
public static class FlushResultImpl implements FlushResult {
final Result result;
final String failureReason;
final long flushSequenceId;
final boolean wroteFlushWalMarker;
/**
* Convenience constructor to use when the flush is successful, the failure message is set to
* null.
* @param result Expecting FLUSHED_NO_COMPACTION_NEEDED or FLUSHED_COMPACTION_NEEDED.
* @param flushSequenceId Generated sequence id that comes right after the edits in the
* memstores.
*/
FlushResultImpl(Result result, long flushSequenceId) {
this(result, flushSequenceId, null, false);
assert result == Result.FLUSHED_NO_COMPACTION_NEEDED || result == Result
.FLUSHED_COMPACTION_NEEDED;
}
/**
* Convenience constructor to use when we cannot flush.
* @param result Expecting CANNOT_FLUSH_MEMSTORE_EMPTY or CANNOT_FLUSH.
* @param failureReason Reason why we couldn't flush.
*/
FlushResultImpl(Result result, String failureReason, boolean wroteFlushMarker) {
this(result, -1, failureReason, wroteFlushMarker);
assert result == Result.CANNOT_FLUSH_MEMSTORE_EMPTY || result == Result.CANNOT_FLUSH;
}
/**
* Constructor with all the parameters.
* @param result Any of the Result.
* @param flushSequenceId Generated sequence id if the memstores were flushed else -1.
* @param failureReason Reason why we couldn't flush, or null.
*/
FlushResultImpl(Result result, long flushSequenceId, String failureReason,
boolean wroteFlushMarker) {
this.result = result;
this.flushSequenceId = flushSequenceId;
this.failureReason = failureReason;
this.wroteFlushWalMarker = wroteFlushMarker;
}
/**
* Convenience method, the equivalent of checking if result is
* FLUSHED_NO_COMPACTION_NEEDED or FLUSHED_COMPACTION_NEEDED.
* @return true if the memstores were flushed, else false.
*/
public boolean isFlushSucceeded() {
return result == Result.FLUSHED_NO_COMPACTION_NEEDED || result == Result
.FLUSHED_COMPACTION_NEEDED;
}
/**
* Convenience method, the equivalent of checking if result is FLUSHED_COMPACTION_NEEDED.
* @return True if the flush requested a compaction, else false (doesn't even mean it flushed).
*/
public boolean isCompactionNeeded() {
return result == Result.FLUSHED_COMPACTION_NEEDED;
}
@Override
public String toString() {
return new StringBuilder()
.append("flush result:").append(result).append(", ")
.append("failureReason:").append(failureReason).append(",")
.append("flush seq id").append(flushSequenceId).toString();
}
@Override
public Result getResult() {
return result;
}
}
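// Illustrative consumption sketch (not part of this class): how a caller typically
// interprets a FlushResult produced by a flush attempt.
//
//   FlushResult fr = ...; // obtained from a flush call, elided here
//   if (fr.isFlushSucceeded() && fr.isCompactionNeeded()) {
//     // the flush went through and asked for a follow-up compaction
//   }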
/** A result object from prepare flush cache stage */
@VisibleForTesting
static class PrepareFlushResult {
final FlushResult result; // indicating a failure result from prepare
final TreeMap<byte[], StoreFlushContext> storeFlushCtxs;
final TreeMap<byte[], List<Path>> committedFiles;
final long startTime;
final long flushOpSeqId;
final long flushedSeqId;
final long totalFlushableSize;
/** Constructs an early exit case */
PrepareFlushResult(FlushResult result, long flushSeqId) {
this(result, null, null, Math.max(0, flushSeqId), 0, 0, 0);
}
/** Constructs a successful prepare flush result */
PrepareFlushResult(
TreeMap<byte[], StoreFlushContext> storeFlushCtxs,
TreeMap<byte[], List<Path>> committedFiles, long startTime, long flushSeqId,
long flushedSeqId, long totalFlushableSize) {
this(null, storeFlushCtxs, committedFiles, startTime,
flushSeqId, flushedSeqId, totalFlushableSize);
}
private PrepareFlushResult(
FlushResult result,
TreeMap<byte[], StoreFlushContext> storeFlushCtxs,
TreeMap<byte[], List<Path>> committedFiles, long startTime, long flushSeqId,
long flushedSeqId, long totalFlushableSize) {
this.result = result;
this.storeFlushCtxs = storeFlushCtxs;
this.committedFiles = committedFiles;
this.startTime = startTime;
this.flushOpSeqId = flushSeqId;
this.flushedSeqId = flushedSeqId;
this.totalFlushableSize = totalFlushableSize;
}
public FlushResult getResult() {
return this.result;
}
}
final WriteState writestate = new WriteState();
long memstoreFlushSize;
final long timestampSlop;
final long rowProcessorTimeout;
// Last flush time for each Store. Useful when we are flushing for each column family
private final ConcurrentMap<Store, Long> lastStoreFlushTimeMap =
new ConcurrentHashMap<Store, Long>();
final RegionServerServices rsServices;
private RegionServerAccounting rsAccounting;
private long flushCheckInterval;
// flushPerChanges is to prevent too many changes in memstore
private long flushPerChanges;
private long blockingMemStoreSize;
final long threadWakeFrequency;
// Used to guard closes
final ReentrantReadWriteLock lock =
new ReentrantReadWriteLock();
// Stop updates lock
private final ReentrantReadWriteLock updatesLock =
new ReentrantReadWriteLock();
private boolean splitRequest;
private byte[] explicitSplitPoint = null;
private final MultiVersionConsistencyControl mvcc =
new MultiVersionConsistencyControl();
// Coprocessor host
private RegionCoprocessorHost coprocessorHost;
private HTableDescriptor htableDescriptor = null;
private RegionSplitPolicy splitPolicy;
private FlushPolicy flushPolicy;
private final MetricsRegion metricsRegion;
private final MetricsRegionWrapperImpl metricsRegionWrapper;
private final Durability durability;
private final boolean regionStatsEnabled;
/**
* HRegion constructor. This constructor should only be used for testing and
* extensions. Instances of HRegion should be instantiated with the
* {@link HRegion#createHRegion} or {@link HRegion#openHRegion} method.
*
* @param tableDir qualified path of directory where region should be located,
* usually the table directory.
* @param wal The WAL is the outbound log for any updates to the HRegion
* The wal file is a logfile from the previous execution that's
* custom-computed for this HRegion. The HRegionServer computes and sorts the
* appropriate wal info for this HRegion. If there is a previous wal file
* (implying that the HRegion has been written-to before), then read it from
* the supplied path.
* @param fs is the filesystem.
* @param confParam is global configuration settings.
* @param regionInfo - HRegionInfo that describes the region
* @param htd the table descriptor
* @param rsServices reference to {@link RegionServerServices} or null
*/
@Deprecated
public HRegion(final Path tableDir, final WAL wal, final FileSystem fs,
final Configuration confParam, final HRegionInfo regionInfo,
final HTableDescriptor htd, final RegionServerServices rsServices) {
this(new HRegionFileSystem(confParam, fs, tableDir, regionInfo),
wal, confParam, htd, rsServices);
}
/**
* HRegion constructor. This constructor should only be used for testing and
* extensions. Instances of HRegion should be instantiated with the
* {@link HRegion#createHRegion} or {@link HRegion#openHRegion} method.
*
* @param fs is the filesystem.
* @param wal The WAL is the outbound log for any updates to the HRegion
* The wal file is a logfile from the previous execution that's
* custom-computed for this HRegion. The HRegionServer computes and sorts the
* appropriate wal info for this HRegion. If there is a previous wal file
* (implying that the HRegion has been written-to before), then read it from
* the supplied path.
* @param confParam is global configuration settings.
* @param htd the table descriptor
* @param rsServices reference to {@link RegionServerServices} or null
*/
public HRegion(final HRegionFileSystem fs, final WAL wal, final Configuration confParam,
final HTableDescriptor htd, final RegionServerServices rsServices) {
if (htd == null) {
throw new IllegalArgumentException("Need table descriptor");
}
if (confParam instanceof CompoundConfiguration) {
throw new IllegalArgumentException("Need original base configuration");
}
this.comparator = fs.getRegionInfo().getComparator();
this.wal = wal;
this.fs = fs;
// 'conf' renamed to 'confParam' b/c we use this.conf in the constructor
this.baseConf = confParam;
this.conf = new CompoundConfiguration()
.add(confParam)
.addStringMap(htd.getConfiguration())
.addWritableMap(htd.getValues());
this.flushCheckInterval = conf.getInt(MEMSTORE_PERIODIC_FLUSH_INTERVAL,
DEFAULT_CACHE_FLUSH_INTERVAL);
this.flushPerChanges = conf.getLong(MEMSTORE_FLUSH_PER_CHANGES, DEFAULT_FLUSH_PER_CHANGES);
if (this.flushPerChanges > MAX_FLUSH_PER_CHANGES) {
throw new IllegalArgumentException(MEMSTORE_FLUSH_PER_CHANGES + " can not exceed "
+ MAX_FLUSH_PER_CHANGES);
}
this.rowLockWaitDuration = conf.getInt("hbase.rowlock.wait.duration",
DEFAULT_ROWLOCK_WAIT_DURATION);
this.isLoadingCfsOnDemandDefault = conf.getBoolean(LOAD_CFS_ON_DEMAND_CONFIG_KEY, true);
this.htableDescriptor = htd;
this.rsServices = rsServices;
this.threadWakeFrequency = conf.getLong(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000);
setHTableSpecificConf();
this.scannerReadPoints = new ConcurrentHashMap<RegionScanner, Long>();
this.busyWaitDuration = conf.getLong(
"hbase.busy.wait.duration", DEFAULT_BUSY_WAIT_DURATION);
this.maxBusyWaitMultiplier = conf.getInt("hbase.busy.wait.multiplier.max", 2);
if (busyWaitDuration * maxBusyWaitMultiplier <= 0L) {
throw new IllegalArgumentException("Invalid hbase.busy.wait.duration ("
+ busyWaitDuration + ") or hbase.busy.wait.multiplier.max ("
+ maxBusyWaitMultiplier + "). Their product should be positive");
}
this.maxBusyWaitDuration = conf.getLong("hbase.ipc.client.call.purge.timeout",
2 * HConstants.DEFAULT_HBASE_RPC_TIMEOUT);
/*
* timestamp.slop provides a server-side constraint on the timestamp. This
* assumes that you base your TS around currentTimeMillis(). In this case,
* throw an error to the user if the user-specified TS is newer than now +
* slop. LATEST_TIMESTAMP == don't use this functionality
*/
this.timestampSlop = conf.getLong(
"hbase.hregion.keyvalue.timestamp.slop.millisecs",
HConstants.LATEST_TIMESTAMP);
/**
* Timeout for the process time in processRowsWithLocks().
* Use -1 to switch off time bound.
*/
this.rowProcessorTimeout = conf.getLong(
"hbase.hregion.row.processor.timeout", DEFAULT_ROW_PROCESSOR_TIMEOUT);
this.durability = htd.getDurability() == Durability.USE_DEFAULT
? DEFAULT_DURABILITY
: htd.getDurability();
if (rsServices != null) {
this.rsAccounting = this.rsServices.getRegionServerAccounting();
// don't initialize coprocessors if not running within a regionserver
// TODO: revisit if coprocessors should load in other cases
this.coprocessorHost = new RegionCoprocessorHost(this, rsServices, conf);
this.metricsRegionWrapper = new MetricsRegionWrapperImpl(this);
this.metricsRegion = new MetricsRegion(this.metricsRegionWrapper);
Map<String, Region> recoveringRegions = rsServices.getRecoveringRegions();
String encodedName = getRegionInfo().getEncodedName();
if (recoveringRegions != null && recoveringRegions.containsKey(encodedName)) {
this.isRecovering = true;
recoveringRegions.put(encodedName, this);
}
} else {
this.metricsRegionWrapper = null;
this.metricsRegion = null;
}
if (LOG.isDebugEnabled()) {
// Write out region name as string and its encoded name.
LOG.debug("Instantiated " + this);
}
// by default, we allow writes against a region when it's in recovering
this.disallowWritesInRecovering =
conf.getBoolean(HConstants.DISALLOW_WRITES_IN_RECOVERING,
HConstants.DEFAULT_DISALLOW_WRITES_IN_RECOVERING_CONFIG);
configurationManager = Optional.absent();
// disable stats tracking for system tables, but check the config for everything else
this.regionStatsEnabled = htd.getTableName().getNamespaceAsString().equals(
NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR) ?
false :
conf.getBoolean(HConstants.ENABLE_CLIENT_BACKPRESSURE,
HConstants.DEFAULT_ENABLE_CLIENT_BACKPRESSURE);
}
void setHTableSpecificConf() {
if (this.htableDescriptor == null) return;
long flushSize = this.htableDescriptor.getMemStoreFlushSize();
if (flushSize <= 0) {
flushSize = conf.getLong(HConstants.HREGION_MEMSTORE_FLUSH_SIZE,
HTableDescriptor.DEFAULT_MEMSTORE_FLUSH_SIZE);
}
this.memstoreFlushSize = flushSize;
this.blockingMemStoreSize = this.memstoreFlushSize *
conf.getLong("hbase.hregion.memstore.block.multiplier", 2);
}
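// Worked example for the sizes above (assuming the stock defaults): with a 128MB
// memstore flush size and "hbase.hregion.memstore.block.multiplier" left at 2,
// blockingMemStoreSize comes out at 256MB; updates are blocked once the memstore
// crosses it.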
/**
* Initialize this region.
* Used only by tests and SplitTransaction to reopen the region.
* You should use createHRegion() or openHRegion()
* @return What the next sequence (edit) id should be.
* @throws IOException e
* @deprecated use HRegion.createHRegion() or HRegion.openHRegion()
*/
@Deprecated
public long initialize() throws IOException {
return initialize(null);
}
/**
* Initialize this region.
*
* @param reporter Tickle every so often if initialize is taking a while.
* @return What the next sequence (edit) id should be.
* @throws IOException e
*/
private long initialize(final CancelableProgressable reporter) throws IOException {
MonitoredTask status = TaskMonitor.get().createStatus("Initializing region " + this);
long nextSeqId = -1;
try {
nextSeqId = initializeRegionInternals(reporter, status);
return nextSeqId;
} finally {
// nextSeqid will be -1 if the initialization fails.
// At least it will be 0 otherwise.
if (nextSeqId == -1) {
status.abort("Exception during region " + getRegionInfo().getRegionNameAsString() +
" initialization.");
}
}
}
private long initializeRegionInternals(final CancelableProgressable reporter,
final MonitoredTask status) throws IOException {
if (coprocessorHost != null) {
status.setStatus("Running coprocessor pre-open hook");
coprocessorHost.preOpen();
}
// Write HRI to a file in case we need to recover hbase:meta
status.setStatus("Writing region info on filesystem");
fs.checkRegionInfoOnFilesystem();
// Initialize all the HStores
status.setStatus("Initializing all the Stores");
long maxSeqId = initializeRegionStores(reporter, status, false);
this.lastReplayedOpenRegionSeqId = maxSeqId;
this.writestate.setReadOnly(ServerRegionReplicaUtil.isReadOnly(this));
this.writestate.flushRequested = false;
this.writestate.compacting = 0;
if (this.writestate.writesEnabled) {
// Remove temporary data left over from old regions
status.setStatus("Cleaning up temporary data from old regions");
fs.cleanupTempDir();
}
if (this.writestate.writesEnabled) {
status.setStatus("Cleaning up detritus from prior splits");
// Get rid of any splits or merges that were lost in-progress. Clean out
// these directories here on open. We may be opening a region that was
// being split but we crashed in the middle of it all.
fs.cleanupAnySplitDetritus();
fs.cleanupMergesDir();
}
// Initialize split policy
this.splitPolicy = RegionSplitPolicy.create(this, conf);
// Initialize flush policy
this.flushPolicy = FlushPolicyFactory.create(this, conf);
long lastFlushTime = EnvironmentEdgeManager.currentTime();
for (Store store: stores.values()) {
this.lastStoreFlushTimeMap.put(store, lastFlushTime);
}
// Use maximum of log sequenceid or that which was found in stores
// (particularly if no recovered edits, seqid will be -1).
long nextSeqid = maxSeqId;
// In distributedLogReplay mode, we don't know the last change sequence number because the
// region is opened before recovery completes. So we add a safety bumper to avoid new
// sequence numbers overlapping sequence numbers already in use.
if (this.writestate.writesEnabled) {
nextSeqid = WALSplitter.writeRegionSequenceIdFile(this.fs.getFileSystem(), this.fs
.getRegionDir(), nextSeqid, (this.isRecovering ? (this.flushPerChanges + 10000000) : 1));
} else {
nextSeqid++;
}
LOG.info("Onlined " + this.getRegionInfo().getShortNameToLog() +
"; next sequenceid=" + nextSeqid);
// A region can be reopened if failed a split; reset flags
this.closing.set(false);
this.closed.set(false);
if (coprocessorHost != null) {
status.setStatus("Running coprocessor post-open hooks");
coprocessorHost.postOpen();
}
status.markComplete("Region opened successfully");
return nextSeqid;
}
private long initializeRegionStores(final CancelableProgressable reporter, MonitoredTask status,
boolean warmupOnly)
throws IOException {
// Load in all the HStores.
long maxSeqId = -1;
// initialized to -1 so that we pick up MemstoreTS from column families
long maxMemstoreTS = -1;
if (!htableDescriptor.getFamilies().isEmpty()) {
// initialize the thread pool for opening stores in parallel.
ThreadPoolExecutor storeOpenerThreadPool =
getStoreOpenAndCloseThreadPool("StoreOpener-" + this.getRegionInfo().getShortNameToLog());
CompletionService<HStore> completionService =
new ExecutorCompletionService<HStore>(storeOpenerThreadPool);
// initialize each store in parallel
for (final HColumnDescriptor family : htableDescriptor.getFamilies()) {
status.setStatus("Instantiating store for column family " + family);
completionService.submit(new Callable<HStore>() {
@Override
public HStore call() throws IOException {
return instantiateHStore(family);
}
});
}
boolean allStoresOpened = false;
try {
for (int i = 0; i < htableDescriptor.getFamilies().size(); i++) {
Future<HStore> future = completionService.take();
HStore store = future.get();
this.stores.put(store.getFamily().getName(), store);
long storeMaxSequenceId = store.getMaxSequenceId();
maxSeqIdInStores.put(store.getColumnFamilyName().getBytes(),
storeMaxSequenceId);
if (maxSeqId == -1 || storeMaxSequenceId > maxSeqId) {
maxSeqId = storeMaxSequenceId;
}
long maxStoreMemstoreTS = store.getMaxMemstoreTS();
if (maxStoreMemstoreTS > maxMemstoreTS) {
maxMemstoreTS = maxStoreMemstoreTS;
}
}
allStoresOpened = true;
} catch (InterruptedException e) {
throw (InterruptedIOException)new InterruptedIOException().initCause(e);
} catch (ExecutionException e) {
throw new IOException(e.getCause());
} finally {
storeOpenerThreadPool.shutdownNow();
if (!allStoresOpened) {
// something went wrong, close all opened stores
LOG.error("Could not initialize all stores for the region=" + this);
for (Store store : this.stores.values()) {
try {
store.close();
} catch (IOException e) {
LOG.warn(e.getMessage());
}
}
}
}
}
if (ServerRegionReplicaUtil.shouldReplayRecoveredEdits(this) && !warmupOnly) {
// Recover any edits if available.
maxSeqId = Math.max(maxSeqId, replayRecoveredEditsIfAny(
this.fs.getRegionDir(), maxSeqIdInStores, reporter, status));
}
maxSeqId = Math.max(maxSeqId, maxMemstoreTS + 1);
mvcc.initialize(maxSeqId);
return maxSeqId;
}
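// Worked example for the sequence id math above (values are hypothetical): stores report
// max seq ids {12, 40}, so maxSeqId starts at 40; replaying recovered edits returns 47,
// and maxMemstoreTS is 45, so the method returns max(47, 45 + 1) = 47 and mvcc starts there.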
private void initializeWarmup(final CancelableProgressable reporter) throws IOException {
MonitoredTask status = TaskMonitor.get().createStatus("Initializing region " + this);
// Initialize all the HStores
status.setStatus("Warming up all the Stores");
initializeRegionStores(reporter, status, true);
}
private void writeRegionOpenMarker(WAL wal, long openSeqId) throws IOException {
Map<byte[], List<Path>> storeFiles = new TreeMap<byte[], List<Path>>(Bytes.BYTES_COMPARATOR);
for (Store store: getStores()) {
ArrayList<Path> storeFileNames = new ArrayList<Path>();
for (StoreFile storeFile: store.getStorefiles()) {
storeFileNames.add(storeFile.getPath());
}
storeFiles.put(store.getFamily().getName(), storeFileNames);
}
RegionEventDescriptor regionOpenDesc = ProtobufUtil.toRegionEventDescriptor(
RegionEventDescriptor.EventType.REGION_OPEN, getRegionInfo(), openSeqId,
getRegionServerServices().getServerName(), storeFiles);
WALUtil.writeRegionEventMarker(wal, getTableDesc(), getRegionInfo(), regionOpenDesc,
getSequenceId());
}
private void writeRegionCloseMarker(WAL wal) throws IOException {
Map<byte[], List<Path>> storeFiles = new TreeMap<byte[], List<Path>>(Bytes.BYTES_COMPARATOR);
for (Store store: getStores()) {
ArrayList<Path> storeFileNames = new ArrayList<Path>();
for (StoreFile storeFile: store.getStorefiles()) {
storeFileNames.add(storeFile.getPath());
}
storeFiles.put(store.getFamily().getName(), storeFileNames);
}
RegionEventDescriptor regionEventDesc = ProtobufUtil.toRegionEventDescriptor(
RegionEventDescriptor.EventType.REGION_CLOSE, getRegionInfo(), getSequenceId().get(),
getRegionServerServices().getServerName(), storeFiles);
WALUtil.writeRegionEventMarker(wal, getTableDesc(), getRegionInfo(), regionEventDesc,
getSequenceId());
// Store SeqId in HDFS when a region closes
// the check that the region folder exists is there because many tests delete the table
// folder while a table is still online
if (this.fs.getFileSystem().exists(this.fs.getRegionDir())) {
WALSplitter.writeRegionSequenceIdFile(this.fs.getFileSystem(), this.fs.getRegionDir(),
getSequenceId().get(), 0);
}
}
/**
* @return True if this region has references.
*/
public boolean hasReferences() {
for (Store store : this.stores.values()) {
if (store.hasReferences()) return true;
}
return false;
}
@Override
public HDFSBlocksDistribution getHDFSBlocksDistribution() {
HDFSBlocksDistribution hdfsBlocksDistribution =
new HDFSBlocksDistribution();
synchronized (this.stores) {
for (Store store : this.stores.values()) {
for (StoreFile sf : store.getStorefiles()) {
HDFSBlocksDistribution storeFileBlocksDistribution =
sf.getHDFSBlockDistribution();
hdfsBlocksDistribution.add(storeFileBlocksDistribution);
}
}
}
return hdfsBlocksDistribution;
}
/**
* This is a helper function to compute HDFS block distribution on demand
* @param conf configuration
* @param tableDescriptor HTableDescriptor of the table
* @param regionInfo the HRegionInfo describing the region
* @return The HDFS blocks distribution for the given region.
* @throws IOException
*/
public static HDFSBlocksDistribution computeHDFSBlocksDistribution(final Configuration conf,
final HTableDescriptor tableDescriptor, final HRegionInfo regionInfo) throws IOException {
Path tablePath = FSUtils.getTableDir(FSUtils.getRootDir(conf), tableDescriptor.getTableName());
return computeHDFSBlocksDistribution(conf, tableDescriptor, regionInfo, tablePath);
}
/**
* This is a helper function to compute HDFS block distribution on demand
* @param conf configuration
* @param tableDescriptor HTableDescriptor of the table
* @param regionInfo the HRegionInfo describing the region
* @param tablePath the table directory
* @return The HDFS blocks distribution for the given region.
* @throws IOException
*/
public static HDFSBlocksDistribution computeHDFSBlocksDistribution(final Configuration conf,
final HTableDescriptor tableDescriptor, final HRegionInfo regionInfo, Path tablePath)
throws IOException {
HDFSBlocksDistribution hdfsBlocksDistribution = new HDFSBlocksDistribution();
FileSystem fs = tablePath.getFileSystem(conf);
HRegionFileSystem regionFs = new HRegionFileSystem(conf, fs, tablePath, regionInfo);
for (HColumnDescriptor family: tableDescriptor.getFamilies()) {
Collection<StoreFileInfo> storeFiles = regionFs.getStoreFiles(family.getNameAsString());
if (storeFiles == null) continue;
for (StoreFileInfo storeFileInfo : storeFiles) {
hdfsBlocksDistribution.add(storeFileInfo.computeHDFSBlocksDistribution(fs));
}
}
return hdfsBlocksDistribution;
}
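// Illustrative usage sketch (conf, htd and hri are assumed to be in hand):
//
//   HDFSBlocksDistribution dist =
//       HRegion.computeHDFSBlocksDistribution(conf, htd, hri);
//   // dist now aggregates block locality across every store file of the region,
//   // without needing the region to be open.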
/**
* Increase the size of mem store in this region and the size of global mem
* store
* @return the size of memstore in this region
*/
public long addAndGetGlobalMemstoreSize(long memStoreSize) {
if (this.rsAccounting != null) {
rsAccounting.addAndGetGlobalMemstoreSize(memStoreSize);
}
return this.memstoreSize.addAndGet(memStoreSize);
}
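// Illustrative sketch: the delta may be negative, e.g. doClose() below passes
// -memstoreSize.get() to zero out the accounting when a region closes unflushed.
//
//   long after = region.addAndGetGlobalMemstoreSize(delta); // delta assumed caller-side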
@Override
public HRegionInfo getRegionInfo() {
return this.fs.getRegionInfo();
}
/**
* @return Instance of {@link RegionServerServices} used by this HRegion.
* Can be null.
*/
RegionServerServices getRegionServerServices() {
return this.rsServices;
}
@Override
public long getReadRequestsCount() {
return readRequestsCount.get();
}
@Override
public void updateReadRequestsCount(long i) {
readRequestsCount.add(i);
}
@Override
public long getWriteRequestsCount() {
return writeRequestsCount.get();
}
@Override
public void updateWriteRequestsCount(long i) {
writeRequestsCount.add(i);
}
@Override
public long getMemstoreSize() {
return memstoreSize.get();
}
@Override
public long getNumMutationsWithoutWAL() {
return numMutationsWithoutWAL.get();
}
@Override
public long getDataInMemoryWithoutWAL() {
return dataInMemoryWithoutWAL.get();
}
@Override
public long getBlockedRequestsCount() {
return blockedRequestsCount.get();
}
@Override
public long getCheckAndMutateChecksPassed() {
return checkAndMutateChecksPassed.get();
}
@Override
public long getCheckAndMutateChecksFailed() {
return checkAndMutateChecksFailed.get();
}
@Override
public MetricsRegion getMetrics() {
return metricsRegion;
}
@Override
public boolean isClosed() {
return this.closed.get();
}
@Override
public boolean isClosing() {
return this.closing.get();
}
@Override
public boolean isReadOnly() {
return this.writestate.isReadOnly();
}
/**
* Reset recovering state of current region
*/
public void setRecovering(boolean newState) {
boolean wasRecovering = this.isRecovering;
// before we flip the recovering switch (enabling reads) we should write the region open
// event to WAL if needed
if (wal != null && getRegionServerServices() != null && !writestate.readOnly
&& wasRecovering && !newState) {
// force a flush only if region replication is set up for this region. Otherwise no need.
boolean forceFlush = getTableDesc().getRegionReplication() > 1;
// force a flush first
MonitoredTask status = TaskMonitor.get().createStatus(
"Flushing region " + this + " because recovery is finished");
try {
if (forceFlush) {
internalFlushcache(status);
}
status.setStatus("Writing region open event marker to WAL because recovery is finished");
try {
long seqId = openSeqNum;
// obtain a new seqId because we possibly have writes and flushes on top of openSeqNum
if (wal != null) {
seqId = getNextSequenceId(wal);
}
writeRegionOpenMarker(wal, seqId);
} catch (IOException e) {
// We cannot rethrow this exception since we are being called from the zk thread. The
// region has already opened. In this case we log the error, but continue
LOG.warn(getRegionInfo().getEncodedName() + " : was not able to write region opening "
+ "event to WAL, continueing", e);
}
} catch (IOException ioe) {
// Distributed log replay semantics do not necessarily require a flush, since the replayed
// data is already written again in the WAL. So a failed flush should be fine.
LOG.warn(getRegionInfo().getEncodedName() + " : was not able to flush "
+ "event to WAL, continuing", ioe);
} finally {
status.cleanup();
}
}
this.isRecovering = newState;
if (wasRecovering && !isRecovering) {
// Call only when wal replay is over.
coprocessorHost.postLogReplay();
}
}
@Override
public boolean isRecovering() {
return this.isRecovering;
}
@Override
public boolean isAvailable() {
return !isClosed() && !isClosing();
}
/** @return true if region is splittable */
public boolean isSplittable() {
return isAvailable() && !hasReferences();
}
/**
* @return true if region is mergeable
*/
public boolean isMergeable() {
if (!isAvailable()) {
LOG.debug("Region " + getRegionInfo().getRegionNameAsString()
+ " is not mergeable because it is closing or closed");
return false;
}
if (hasReferences()) {
LOG.debug("Region " + getRegionInfo().getRegionNameAsString()
+ " is not mergeable because it has references");
return false;
}
return true;
}
public boolean areWritesEnabled() {
synchronized(this.writestate) {
return this.writestate.writesEnabled;
}
}
public MultiVersionConsistencyControl getMVCC() {
return mvcc;
}
@Override
public long getMaxFlushedSeqId() {
return maxFlushedSeqId;
}
@Override
public long getReadpoint(IsolationLevel isolationLevel) {
if (isolationLevel == IsolationLevel.READ_UNCOMMITTED) {
// This scan can read even uncommitted transactions
return Long.MAX_VALUE;
}
return mvcc.memstoreReadPoint();
}
@Override
public boolean isLoadingCfsOnDemandDefault() {
return this.isLoadingCfsOnDemandDefault;
}
/**
* Close down this HRegion. Flush the cache, shut down each HStore, don't
* service any more calls.
*
* <p>This method could take some time to execute, so don't call it from a
* time-sensitive thread.
*
* @return Map of all the storage files that the HRegion's component
* HStores make use of, keyed by column family name. Returns an empty map
* if already closed, and null if it is judged that the region should not close.
*
* @throws IOException e
*/
public Map<byte[], List<StoreFile>> close() throws IOException {
return close(false);
}
private final Object closeLock = new Object();
/** Conf key for the periodic flush interval */
public static final String MEMSTORE_PERIODIC_FLUSH_INTERVAL =
"hbase.regionserver.optionalcacheflushinterval";
/** Default interval for the memstore flush */
public static final int DEFAULT_CACHE_FLUSH_INTERVAL = 3600000;
public static final int META_CACHE_FLUSH_INTERVAL = 300000; // 5 minutes
/** Conf key to force a flush if there are already enough changes for one region in memstore */
public static final String MEMSTORE_FLUSH_PER_CHANGES =
"hbase.regionserver.flush.per.changes";
public static final long DEFAULT_FLUSH_PER_CHANGES = 30000000; // 30 million
/**
* The following MAX_FLUSH_PER_CHANGES is large enough because each KeyValue has 20+ bytes of
* overhead. Therefore, even 1G of empty KVs occupies at least 20GB of memstore size for a
* single region
public static final long MAX_FLUSH_PER_CHANGES = 1000000000; // 1G
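// Illustrative tuning sketch (values are examples, not recommendations): these keys
// are plain Configuration settings and can be overridden per cluster.
//
//   conf.setInt(MEMSTORE_PERIODIC_FLUSH_INTERVAL, DEFAULT_CACHE_FLUSH_INTERVAL / 2);
//   conf.setLong(MEMSTORE_FLUSH_PER_CHANGES, 10000000L); // must stay <= MAX_FLUSH_PER_CHANGES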
/**
* Close down this HRegion. Flush the cache unless abort parameter is true,
* Shut down each HStore, don't service any more calls.
*
* This method could take some time to execute, so don't call it from a
* time-sensitive thread.
*
* @param abort true if server is aborting (only during testing)
* @return Map of all the storage files that the HRegion's component
* HStores make use of, keyed by column family name. Can be null if
* we are not to close at this time or we are already closed.
*
* @throws IOException e
*/
public Map<byte[], List<StoreFile>> close(final boolean abort) throws IOException {
// Only allow one thread to close at a time. Serialize them so dual
// threads attempting to close will run up against each other.
MonitoredTask status = TaskMonitor.get().createStatus(
"Closing region " + this +
(abort ? " due to abort" : ""));
status.setStatus("Waiting for close lock");
try {
synchronized (closeLock) {
return doClose(abort, status);
}
} finally {
status.cleanup();
}
}
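// Illustrative shutdown sketch: per the javadoc above, a null result means the region
// declined to close, while a non-null map lists the store files per column family.
//
//   Map<byte[], List<StoreFile>> files = region.close(false);
//   if (files == null) {
//     // region judged it should not close at this time; retry or give up
//   }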
private Map<byte[], List<StoreFile>> doClose(final boolean abort, MonitoredTask status)
throws IOException {
if (isClosed()) {
LOG.warn("Region " + this + " already closed");
return null;
}
if (coprocessorHost != null) {
status.setStatus("Running coprocessor pre-close hooks");
this.coprocessorHost.preClose(abort);
}
status.setStatus("Disabling compacts and flushes for region");
boolean canFlush = true;
synchronized (writestate) {
// Disable compacting and flushing by background threads for this
// region.
canFlush = !writestate.readOnly;
writestate.writesEnabled = false;
LOG.debug("Closing " + this + ": disabling compactions & flushes");
waitForFlushesAndCompactions();
}
// If we were not just flushing, is it worth doing a preflush...one
// that will clear out the bulk of the memstore before we put up
// the close flag?
if (!abort && worthPreFlushing() && canFlush) {
status.setStatus("Pre-flushing region before close");
LOG.info("Running close preflush of " + getRegionInfo().getRegionNameAsString());
try {
internalFlushcache(status);
} catch (IOException ioe) {
// Failed to flush the region. Keep going.
status.setStatus("Failed pre-flush " + this + "; " + ioe.getMessage());
}
}
this.closing.set(true);
status.setStatus("Disabling writes for close");
// block waiting for the lock for closing
lock.writeLock().lock();
try {
if (this.isClosed()) {
status.abort("Already got closed by another process");
// SplitTransaction handles the null
return null;
}
LOG.debug("Updates disabled for region " + this);
// Don't flush the cache if we are aborting
if (!abort && canFlush) {
int flushCount = 0;
while (this.memstoreSize.get() > 0) {
try {
if (flushCount++ > 0) {
int actualFlushes = flushCount - 1;
if (actualFlushes > 5) {
// If we tried 5 times and are unable to clear memory, abort
// so we do not lose data
throw new DroppedSnapshotException("Failed clearing memory after " +
actualFlushes + " attempts on region: " +
Bytes.toStringBinary(getRegionInfo().getRegionName()));
}
LOG.info("Running extra flush, " + actualFlushes +
" (carrying snapshot?) " + this);
}
internalFlushcache(status);
} catch (IOException ioe) {
status.setStatus("Failed flush " + this + ", putting online again");
synchronized (writestate) {
writestate.writesEnabled = true;
}
// Have to throw to upper layers. I can't abort server from here.
throw ioe;
}
}
}
Map<byte[], List<StoreFile>> result =
new TreeMap<byte[], List<StoreFile>>(Bytes.BYTES_COMPARATOR);
if (!stores.isEmpty()) {
// initialize the thread pool for closing stores in parallel.
ThreadPoolExecutor storeCloserThreadPool =
getStoreOpenAndCloseThreadPool("StoreCloserThread-" +
getRegionInfo().getRegionNameAsString());
CompletionService<Pair<byte[], Collection<StoreFile>>> completionService =
new ExecutorCompletionService<Pair<byte[], Collection<StoreFile>>>(storeCloserThreadPool);
// close each store in parallel
for (final Store store : stores.values()) {
assert abort || store.getFlushableSize() == 0 || writestate.readOnly;
completionService
.submit(new Callable<Pair<byte[], Collection<StoreFile>>>() {
@Override
public Pair<byte[], Collection<StoreFile>> call() throws IOException {
return new Pair<byte[], Collection<StoreFile>>(
store.getFamily().getName(), store.close());
}
});
}
try {
for (int i = 0; i < stores.size(); i++) {
Future<Pair<byte[], Collection<StoreFile>>> future = completionService.take();
Pair<byte[], Collection<StoreFile>> storeFiles = future.get();
List<StoreFile> familyFiles = result.get(storeFiles.getFirst());
if (familyFiles == null) {
familyFiles = new ArrayList<StoreFile>();
result.put(storeFiles.getFirst(), familyFiles);
}
familyFiles.addAll(storeFiles.getSecond());
}
} catch (InterruptedException e) {
throw (InterruptedIOException)new InterruptedIOException().initCause(e);
} catch (ExecutionException e) {
throw new IOException(e.getCause());
} finally {
storeCloserThreadPool.shutdownNow();
}
}
status.setStatus("Writing region close event to WAL");
if (!abort && wal != null && getRegionServerServices() != null && !writestate.readOnly) {
writeRegionCloseMarker(wal);
}
this.closed.set(true);
if (!canFlush) {
addAndGetGlobalMemstoreSize(-memstoreSize.get());
} else if (memstoreSize.get() != 0) {
LOG.error("Memstore size is " + memstoreSize.get());
}
if (coprocessorHost != null) {
status.setStatus("Running coprocessor post-close hooks");
this.coprocessorHost.postClose(abort);
}
if (this.metricsRegion != null) {
this.metricsRegion.close();
}
if (this.metricsRegionWrapper != null) {
Closeables.closeQuietly(this.metricsRegionWrapper);
}
status.markComplete("Closed");
LOG.info("Closed " + this);
return result;
} finally {
lock.writeLock().unlock();
}
}
@Override
public void waitForFlushesAndCompactions() {
synchronized (writestate) {
if (this.writestate.readOnly) {
// we should not wait for replayed flushes if we are read only (for example in case the
// region is a secondary replica).
return;
}
boolean interrupted = false;
try {
while (writestate.compacting > 0 || writestate.flushing) {
LOG.debug("waiting for " + writestate.compacting + " compactions"
+ (writestate.flushing ? " & cache flush" : "") + " to complete for region " + this);
try {
writestate.wait();
} catch (InterruptedException iex) {
// essentially ignore and propagate the interrupt back up
LOG.warn("Interrupted while waiting");
interrupted = true;
}
}
} finally {
if (interrupted) {
Thread.currentThread().interrupt();
}
}
}
}
protected ThreadPoolExecutor getStoreOpenAndCloseThreadPool(
final String threadNamePrefix) {
int numStores = Math.max(1, this.htableDescriptor.getFamilies().size());
int maxThreads = Math.min(numStores,
conf.getInt(HConstants.HSTORE_OPEN_AND_CLOSE_THREADS_MAX,
HConstants.DEFAULT_HSTORE_OPEN_AND_CLOSE_THREADS_MAX));
return getOpenAndCloseThreadPool(maxThreads, threadNamePrefix);
}
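// Worked example for the sizing above (assuming the shipped default max of 1): with
// 3 column families the pool gets min(3, 1) = 1 thread; raising
// hbase.hstore.open.and.close.threads.max to 8 yields min(3, 8) = 3 threads.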
protected ThreadPoolExecutor getStoreFileOpenAndCloseThreadPool(
final String threadNamePrefix) {
int numStores = Math.max(1, this.htableDescriptor.getFamilies().size());
int maxThreads = Math.max(1,
conf.getInt(HConstants.HSTORE_OPEN_AND_CLOSE_THREADS_MAX,
HConstants.DEFAULT_HSTORE_OPEN_AND_CLOSE_THREADS_MAX)
/ numStores);
return getOpenAndCloseThreadPool(maxThreads, threadNamePrefix);
}
static ThreadPoolExecutor getOpenAndCloseThreadPool(int maxThreads,
final String threadNamePrefix) {
return Threads.getBoundedCachedThreadPool(maxThreads, 30L, TimeUnit.SECONDS,
new ThreadFactory() {
private int count = 1;
@Override
public Thread newThread(Runnable r) {
return new Thread(r, threadNamePrefix + "-" + count++);
}
});
}
/**
* @return True if it's worth doing a flush before we put up the close flag.
*/
private boolean worthPreFlushing() {
return this.memstoreSize.get() >
this.conf.getLong("hbase.hregion.preclose.flush.size", 1024 * 1024 * 5);
}
//////////////////////////////////////////////////////////////////////////////
// HRegion accessors
//////////////////////////////////////////////////////////////////////////////
@Override
public HTableDescriptor getTableDesc() {
return this.htableDescriptor;
}
/** @return WAL in use for this region */
public WAL getWAL() {
return this.wal;
}
/**
* A split takes the config from the parent region & passes it to the daughter
* region's constructor. If 'conf' was passed, you would end up using the HTD
* of the parent region in addition to the new daughter HTD. Pass 'baseConf'
* to the daughter regions to avoid this tricky dedupe problem.
* @return Configuration object
*/
Configuration getBaseConf() {
return this.baseConf;
}
/** @return {@link FileSystem} being used by this region */
public FileSystem getFilesystem() {
return fs.getFileSystem();
}
/** @return the {@link HRegionFileSystem} used by this region */
public HRegionFileSystem getRegionFileSystem() {
return this.fs;
}
@Override
public long getEarliestFlushTimeForAllStores() {
return lastStoreFlushTimeMap.isEmpty() ? Long.MAX_VALUE : Collections.min(lastStoreFlushTimeMap
.values());
}
@Override
  public long getOldestHfileTs(boolean majorCompactionOnly) throws IOException {
long result = Long.MAX_VALUE;
for (Store store : getStores()) {
for (StoreFile file : store.getStorefiles()) {
HFile.Reader reader = file.getReader().getHFileReader();
        if (majorCompactionOnly) {
byte[] val = reader.loadFileInfo().get(StoreFile.MAJOR_COMPACTION_KEY);
if (val == null || !Bytes.toBoolean(val)) {
continue;
}
}
result = Math.min(result, reader.getFileContext().getFileCreateTime());
}
}
return result == Long.MAX_VALUE ? 0 : result;
}
RegionLoad.Builder setCompleteSequenceId(RegionLoad.Builder regionLoadBldr) {
long lastFlushOpSeqIdLocal = this.lastFlushOpSeqId;
byte[] encodedRegionName = this.getRegionInfo().getEncodedNameAsBytes();
regionLoadBldr.clearStoreCompleteSequenceId();
for (byte[] familyName : this.stores.keySet()) {
long oldestUnflushedSeqId = this.wal.getEarliestMemstoreSeqNum(encodedRegionName, familyName);
      // no oldestUnflushedSeqId means no data has been written to the store after the last
      // flush, so we use lastFlushOpSeqId as the complete sequence id for the store.
regionLoadBldr.addStoreCompleteSequenceId(StoreSequenceId
.newBuilder()
.setFamilyName(ByteString.copyFrom(familyName))
.setSequenceId(
oldestUnflushedSeqId < 0 ? lastFlushOpSeqIdLocal : oldestUnflushedSeqId - 1).build());
}
return regionLoadBldr.setCompleteSequenceId(this.maxFlushedSeqId);
}
//////////////////////////////////////////////////////////////////////////////
// HRegion maintenance.
//
// These methods are meant to be called periodically by the HRegionServer for
// upkeep.
//////////////////////////////////////////////////////////////////////////////
  /** @return size of the largest HStore. */
public long getLargestHStoreSize() {
long size = 0;
for (Store h : stores.values()) {
long storeSize = h.getSize();
if (storeSize > size) {
size = storeSize;
}
}
return size;
}
/**
* @return KeyValue Comparator
*/
public KeyValue.KVComparator getComparator() {
return this.comparator;
}
/*
* Do preparation for pending compaction.
* @throws IOException
*/
protected void doRegionCompactionPrep() throws IOException {
}
@Override
public void triggerMajorCompaction() throws IOException {
for (Store s : getStores()) {
s.triggerMajorCompaction();
}
}
@Override
public void compact(final boolean majorCompaction) throws IOException {
if (majorCompaction) {
triggerMajorCompaction();
}
for (Store s : getStores()) {
CompactionContext compaction = s.requestCompaction();
if (compaction != null) {
CompactionThroughputController controller = null;
if (rsServices != null) {
controller = CompactionThroughputControllerFactory.create(rsServices, conf);
}
if (controller == null) {
controller = NoLimitCompactionThroughputController.INSTANCE;
}
compact(compaction, s, controller);
}
}
}
/**
   * This is a helper function that compacts all the stores synchronously.
   * It is used by utilities and testing.
*
* @throws IOException e
*/
public void compactStores() throws IOException {
for (Store s : getStores()) {
CompactionContext compaction = s.requestCompaction();
if (compaction != null) {
compact(compaction, s, NoLimitCompactionThroughputController.INSTANCE);
}
}
}
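  // Illustrative use from a utility or test, assuming an open HRegion named
  // 'region' (hypothetical):
  //
  //   region.compactStores();   // synchronously compact every store that needs it
  //   region.compact(true);     // same, but triggering major compactions first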
/**
   * This is a helper function that compacts the given store.
   * It is used by utilities and testing.
*
* @throws IOException e
*/
@VisibleForTesting
void compactStore(byte[] family, CompactionThroughputController throughputController)
throws IOException {
Store s = getStore(family);
CompactionContext compaction = s.requestCompaction();
if (compaction != null) {
compact(compaction, s, throughputController);
}
}
/*
* Called by compaction thread and after region is opened to compact the
* HStores if necessary.
*
* <p>This operation could block for a long time, so don't call it from a
* time-sensitive thread.
*
* Note that no locking is necessary at this level because compaction only
* conflicts with a region split, and that cannot happen because the region
* server does them sequentially and not in parallel.
*
* @param compaction Compaction details, obtained by requestCompaction()
* @return whether the compaction completed
*/
public boolean compact(CompactionContext compaction, Store store,
CompactionThroughputController throughputController) throws IOException {
assert compaction != null && compaction.hasSelection();
assert !compaction.getRequest().getFiles().isEmpty();
if (this.closing.get() || this.closed.get()) {
LOG.debug("Skipping compaction on " + this + " because closing/closed");
store.cancelRequestedCompaction(compaction);
return false;
}
MonitoredTask status = null;
boolean requestNeedsCancellation = true;
// block waiting for the lock for compaction
lock.readLock().lock();
try {
byte[] cf = Bytes.toBytes(store.getColumnFamilyName());
if (stores.get(cf) != store) {
LOG.warn("Store " + store.getColumnFamilyName() + " on region " + this
+ " has been re-instantiated, cancel this compaction request. "
+ " It may be caused by the roll back of split transaction");
return false;
}
status = TaskMonitor.get().createStatus("Compacting " + store + " in " + this);
if (this.closed.get()) {
String msg = "Skipping compaction on " + this + " because closed";
LOG.debug(msg);
status.abort(msg);
return false;
}
boolean wasStateSet = false;
try {
synchronized (writestate) {
if (writestate.writesEnabled) {
wasStateSet = true;
++writestate.compacting;
} else {
String msg = "NOT compacting region " + this + ". Writes disabled.";
LOG.info(msg);
status.abort(msg);
return false;
}
}
LOG.info("Starting compaction on " + store + " in region " + this
+ (compaction.getRequest().isOffPeak()?" as an off-peak compaction":""));
doRegionCompactionPrep();
try {
status.setStatus("Compacting store " + store);
// We no longer need to cancel the request on the way out of this
// method because Store#compact will clean up unconditionally
requestNeedsCancellation = false;
store.compact(compaction, throughputController);
} catch (InterruptedIOException iioe) {
String msg = "compaction interrupted";
LOG.info(msg, iioe);
status.abort(msg);
return false;
}
} finally {
if (wasStateSet) {
synchronized (writestate) {
--writestate.compacting;
if (writestate.compacting <= 0) {
writestate.notifyAll();
}
}
}
}
status.markComplete("Compaction complete");
return true;
} finally {
try {
if (requestNeedsCancellation) store.cancelRequestedCompaction(compaction);
if (status != null) status.cleanup();
} finally {
lock.readLock().unlock();
}
}
}
@Override
public FlushResult flush(boolean force) throws IOException {
return flushcache(force, false);
}
/**
* Flush the cache.
*
* When this method is called the cache will be flushed unless:
* <ol>
* <li>the cache is empty</li>
   * <li>the region is closed</li>
* <li>a flush is already in progress</li>
* <li>writes are disabled</li>
* </ol>
*
* <p>This method may block for some time, so it should not be called from a
* time-sensitive thread.
* @param forceFlushAllStores whether we want to flush all stores
* @param writeFlushRequestWalMarker whether to write the flush request marker to WAL
   * @return whether the flush succeeded and whether the region needs compacting
*
* @throws IOException general io exceptions
* @throws DroppedSnapshotException Thrown when replay of wal is required
* because a Snapshot was not properly persisted.
*/
public FlushResult flushcache(boolean forceFlushAllStores, boolean writeFlushRequestWalMarker)
throws IOException {
// fail-fast instead of waiting on the lock
if (this.closing.get()) {
String msg = "Skipping flush on " + this + " because closing";
LOG.debug(msg);
return new FlushResultImpl(FlushResult.Result.CANNOT_FLUSH, msg, false);
}
MonitoredTask status = TaskMonitor.get().createStatus("Flushing " + this);
status.setStatus("Acquiring readlock on region");
// block waiting for the lock for flushing cache
lock.readLock().lock();
try {
if (this.closed.get()) {
String msg = "Skipping flush on " + this + " because closed";
LOG.debug(msg);
status.abort(msg);
return new FlushResultImpl(FlushResult.Result.CANNOT_FLUSH, msg, false);
}
if (coprocessorHost != null) {
status.setStatus("Running coprocessor pre-flush hooks");
coprocessorHost.preFlush();
}
      // TODO: this should be managed within memstore with the snapshot, updated only after
      // the flush is successful
if (numMutationsWithoutWAL.get() > 0) {
numMutationsWithoutWAL.set(0);
dataInMemoryWithoutWAL.set(0);
}
synchronized (writestate) {
if (!writestate.flushing && writestate.writesEnabled) {
this.writestate.flushing = true;
} else {
if (LOG.isDebugEnabled()) {
LOG.debug("NOT flushing memstore for region " + this
+ ", flushing=" + writestate.flushing + ", writesEnabled="
+ writestate.writesEnabled);
}
String msg = "Not flushing since "
+ (writestate.flushing ? "already flushing"
: "writes not enabled");
status.abort(msg);
return new FlushResultImpl(FlushResult.Result.CANNOT_FLUSH, msg, false);
}
}
try {
Collection<Store> specificStoresToFlush =
forceFlushAllStores ? stores.values() : flushPolicy.selectStoresToFlush();
FlushResult fs = internalFlushcache(specificStoresToFlush,
status, writeFlushRequestWalMarker);
if (coprocessorHost != null) {
status.setStatus("Running post-flush coprocessor hooks");
coprocessorHost.postFlush();
}
status.markComplete("Flush successful");
return fs;
} finally {
synchronized (writestate) {
writestate.flushing = false;
this.writestate.flushRequested = false;
writestate.notifyAll();
}
}
} finally {
lock.readLock().unlock();
status.cleanup();
}
}
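  // Illustrative use, assuming an open HRegion named 'region' (hypothetical) and
  // the FlushResult accessors available in this branch:
  //
  //   FlushResult res = region.flush(true);   // force-flush all stores
  //   if (res.isCompactionNeeded()) {
  //     region.compactStores();
  //   }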
/**
   * Should the store be flushed because it is old enough?
   * <p>
   * Every FlushPolicy should call this to determine whether a store is old enough to flush
   * (unless it always flushes all stores). Otherwise the {@link #shouldFlush()} method will
   * always return true, which will cause a lot of flush requests.
*/
boolean shouldFlushStore(Store store) {
long maxFlushedSeqId =
this.wal.getEarliestMemstoreSeqNum(getRegionInfo().getEncodedNameAsBytes(), store
.getFamily().getName()) - 1;
if (maxFlushedSeqId > 0 && maxFlushedSeqId + flushPerChanges < sequenceId.get()) {
if (LOG.isDebugEnabled()) {
LOG.debug("Column Family: " + store.getColumnFamilyName() + " of region " + this
+ " will be flushed because its max flushed seqId(" + maxFlushedSeqId
+ ") is far away from current(" + sequenceId.get() + "), max allowed is "
+ flushPerChanges);
}
return true;
}
if (flushCheckInterval <= 0) {
return false;
}
long now = EnvironmentEdgeManager.currentTime();
if (store.timeOfOldestEdit() < now - flushCheckInterval) {
if (LOG.isDebugEnabled()) {
LOG.debug("Column Family: " + store.getColumnFamilyName() + " of region " + this
+ " will be flushed because time of its oldest edit (" + store.timeOfOldestEdit()
+ ") is far away from now(" + now + "), max allowed is " + flushCheckInterval);
}
return true;
}
return false;
}
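  // Worked example for the seqId check above (hypothetical numbers): if the
  // store's earliest unflushed seqId is 101, maxFlushedSeqId computes to 100;
  // with flushPerChanges = 1000 and a current region sequenceId of 2000,
  // 100 + 1000 < 2000 holds, so the store is flushed even though its oldest
  // edit may be recent in wall-clock terms.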
/**
   * Should the memstore be flushed now?
*/
boolean shouldFlush() {
// This is a rough measure.
if (this.maxFlushedSeqId > 0
&& (this.maxFlushedSeqId + this.flushPerChanges < this.sequenceId.get())) {
return true;
}
long modifiedFlushCheckInterval = flushCheckInterval;
if (getRegionInfo().isMetaRegion() &&
getRegionInfo().getReplicaId() == HRegionInfo.DEFAULT_REPLICA_ID) {
modifiedFlushCheckInterval = META_CACHE_FLUSH_INTERVAL;
}
if (modifiedFlushCheckInterval <= 0) { //disabled
return false;
}
long now = EnvironmentEdgeManager.currentTime();
    // if we flushed in the recent past, we don't need to flush again now
if ((now - getEarliestFlushTimeForAllStores() < modifiedFlushCheckInterval)) {
return false;
}
    // since we didn't flush in the recent past, flush now if certain conditions
    // are met. Return true on first such memstore hit.
for (Store s : getStores()) {
if (s.timeOfOldestEdit() < now - modifiedFlushCheckInterval) {
// we have an old enough edit in the memstore, flush
return true;
}
}
return false;
}
/**
* Flushing all stores.
*
* @see #internalFlushcache(Collection, MonitoredTask, boolean)
*/
private FlushResult internalFlushcache(MonitoredTask status)
throws IOException {
return internalFlushcache(stores.values(), status, false);
}
/**
* Flushing given stores.
*
* @see #internalFlushcache(WAL, long, Collection, MonitoredTask, boolean)
*/
private FlushResult internalFlushcache(final Collection<Store> storesToFlush,
MonitoredTask status, boolean writeFlushWalMarker) throws IOException {
return internalFlushcache(this.wal, HConstants.NO_SEQNUM, storesToFlush,
status, writeFlushWalMarker);
}
/**
* Flush the memstore. Flushing the memstore is a little tricky. We have a lot
* of updates in the memstore, all of which have also been written to the wal.
* We need to write those updates in the memstore out to disk, while being
* able to process reads/writes as much as possible during the flush
* operation.
* <p>
   * This method may block for some time. Every time you call it, we up the
   * region's sequence id even if we don't flush; i.e. the returned sequence id
   * will be at least one larger than the last edit applied to this region. The
   * returned id does not refer to an actual edit. The returned id can be used
* for say installing a bulk loaded file just ahead of the last hfile that was
* the result of this flush, etc.
*
* @param wal
* Null if we're NOT to go via wal.
* @param myseqid
* The seqid to use if <code>wal</code> is null writing out flush
* file.
* @param storesToFlush
* The list of stores to flush.
* @return object describing the flush's state
* @throws IOException
* general io exceptions
* @throws DroppedSnapshotException
* Thrown when replay of wal is required because a Snapshot was not
* properly persisted.
*/
protected FlushResult internalFlushcache(final WAL wal, final long myseqid,
final Collection<Store> storesToFlush, MonitoredTask status, boolean writeFlushWalMarker)
throws IOException {
PrepareFlushResult result
= internalPrepareFlushCache(wal, myseqid, storesToFlush, status, writeFlushWalMarker);
if (result.result == null) {
return internalFlushCacheAndCommit(wal, status, result, storesToFlush);
} else {
return result.result; // early exit due to failure from prepare stage
}
}
protected PrepareFlushResult internalPrepareFlushCache(
final WAL wal, final long myseqid, final Collection<Store> storesToFlush,
MonitoredTask status, boolean writeFlushWalMarker)
throws IOException {
if (this.rsServices != null && this.rsServices.isAborted()) {
// Don't flush when server aborting, it's unsafe
throw new IOException("Aborting flush because server is aborted...");
}
final long startTime = EnvironmentEdgeManager.currentTime();
// If nothing to flush, return, but we need to safely update the region sequence id
if (this.memstoreSize.get() <= 0) {
      // Take an update lock because we are about to change the sequence id and we want the sequence id
// to be at the border of the empty memstore.
MultiVersionConsistencyControl.WriteEntry w = null;
this.updatesLock.writeLock().lock();
try {
if (this.memstoreSize.get() <= 0) {
// Presume that if there are still no edits in the memstore, then there are no edits for
// this region out in the WAL subsystem so no need to do any trickery clearing out
// edits in the WAL system. Up the sequence number so the resulting flush id is for
// sure just beyond the last appended region edit (useful as a marker when bulk loading,
// etc.)
          // wal can be null when replaying edits.
if (wal != null) {
w = mvcc.beginMemstoreInsert();
long flushOpSeqId = getNextSequenceId(wal);
FlushResult flushResult = new FlushResultImpl(
FlushResult.Result.CANNOT_FLUSH_MEMSTORE_EMPTY, flushOpSeqId, "Nothing to flush",
writeFlushRequestMarkerToWAL(wal, writeFlushWalMarker));
w.setWriteNumber(flushOpSeqId);
mvcc.waitForPreviousTransactionsComplete(w);
w = null;
return new PrepareFlushResult(flushResult, myseqid);
} else {
return new PrepareFlushResult(
new FlushResultImpl(FlushResult.Result.CANNOT_FLUSH_MEMSTORE_EMPTY,
"Nothing to flush", false),
myseqid);
}
}
} finally {
this.updatesLock.writeLock().unlock();
if (w != null) {
mvcc.advanceMemstore(w);
}
}
}
if (LOG.isInfoEnabled()) {
LOG.info("Started memstore flush for " + this + ", current region memstore size "
+ StringUtils.byteDesc(this.memstoreSize.get()) + ", and " + storesToFlush.size() + "/"
+ stores.size() + " column families' memstores are being flushed."
+ ((wal != null) ? "" : "; wal is null, using passed sequenceid=" + myseqid));
// only log when we are not flushing all stores.
if (this.stores.size() > storesToFlush.size()) {
for (Store store: storesToFlush) {
LOG.info("Flushing Column Family: " + store.getColumnFamilyName()
+ " which was occupying "
+ StringUtils.byteDesc(store.getMemStoreSize()) + " of memstore.");
}
}
}
    // Stop updates while we snapshot the memstore of all of this region's stores. We only have
// to do this for a moment. It is quick. We also set the memstore size to zero here before we
// allow updates again so its value will represent the size of the updates received
// during flush
MultiVersionConsistencyControl.WriteEntry w = null;
// We have to take an update lock during snapshot, or else a write could end up in both snapshot
// and memstore (makes it difficult to do atomic rows then)
status.setStatus("Obtaining lock to block concurrent updates");
// block waiting for the lock for internal flush
this.updatesLock.writeLock().lock();
status.setStatus("Preparing to flush by snapshotting stores in " +
getRegionInfo().getEncodedName());
long totalFlushableSizeOfFlushableStores = 0;
Set<byte[]> flushedFamilyNames = new HashSet<byte[]>();
for (Store store: storesToFlush) {
flushedFamilyNames.add(store.getFamily().getName());
}
TreeMap<byte[], StoreFlushContext> storeFlushCtxs
= new TreeMap<byte[], StoreFlushContext>(Bytes.BYTES_COMPARATOR);
TreeMap<byte[], List<Path>> committedFiles = new TreeMap<byte[], List<Path>>(
Bytes.BYTES_COMPARATOR);
// The sequence id of this flush operation which is used to log FlushMarker and pass to
// createFlushContext to use as the store file's sequence id.
long flushOpSeqId = HConstants.NO_SEQNUM;
// The max flushed sequence id after this flush operation. Used as completeSequenceId which is
// passed to HMaster.
long flushedSeqId = HConstants.NO_SEQNUM;
byte[] encodedRegionName = getRegionInfo().getEncodedNameAsBytes();
long trxId = 0;
try {
try {
w = mvcc.beginMemstoreInsert();
if (wal != null) {
if (!wal.startCacheFlush(encodedRegionName, flushedFamilyNames)) {
// This should never happen.
String msg = "Flush will not be started for ["
+ this.getRegionInfo().getEncodedName() + "] - because the WAL is closing.";
status.setStatus(msg);
return new PrepareFlushResult(
new FlushResultImpl(FlushResult.Result.CANNOT_FLUSH, msg, false),
myseqid);
}
flushOpSeqId = getNextSequenceId(wal);
long oldestUnflushedSeqId = wal.getEarliestMemstoreSeqNum(encodedRegionName);
// no oldestUnflushedSeqId means we flushed all stores.
// or the unflushed stores are all empty.
flushedSeqId = (oldestUnflushedSeqId == HConstants.NO_SEQNUM) ? flushOpSeqId
: oldestUnflushedSeqId - 1;
} else {
// use the provided sequence Id as WAL is not being used for this flush.
flushedSeqId = flushOpSeqId = myseqid;
}
for (Store s : storesToFlush) {
totalFlushableSizeOfFlushableStores += s.getFlushableSize();
storeFlushCtxs.put(s.getFamily().getName(), s.createFlushContext(flushOpSeqId));
committedFiles.put(s.getFamily().getName(), null); // for writing stores to WAL
}
// write the snapshot start to WAL
if (wal != null && !writestate.readOnly) {
FlushDescriptor desc = ProtobufUtil.toFlushDescriptor(FlushAction.START_FLUSH,
getRegionInfo(), flushOpSeqId, committedFiles);
// no sync. Sync is below where we do not hold the updates lock
trxId = WALUtil.writeFlushMarker(wal, this.htableDescriptor, getRegionInfo(),
desc, sequenceId, false);
}
// Prepare flush (take a snapshot)
for (StoreFlushContext flush : storeFlushCtxs.values()) {
flush.prepare();
}
} catch (IOException ex) {
if (wal != null) {
if (trxId > 0) { // check whether we have already written START_FLUSH to WAL
try {
FlushDescriptor desc = ProtobufUtil.toFlushDescriptor(FlushAction.ABORT_FLUSH,
getRegionInfo(), flushOpSeqId, committedFiles);
WALUtil.writeFlushMarker(wal, this.htableDescriptor, getRegionInfo(),
desc, sequenceId, false);
} catch (Throwable t) {
LOG.warn("Received unexpected exception trying to write ABORT_FLUSH marker to WAL:" +
StringUtils.stringifyException(t));
// ignore this since we will be aborting the RS with DSE.
}
}
// we have called wal.startCacheFlush(), now we have to abort it
wal.abortCacheFlush(this.getRegionInfo().getEncodedNameAsBytes());
throw ex; // let upper layers deal with it.
}
} finally {
this.updatesLock.writeLock().unlock();
}
String s = "Finished memstore snapshotting " + this +
", syncing WAL and waiting on mvcc, flushsize=" + totalFlushableSizeOfFlushableStores;
status.setStatus(s);
if (LOG.isTraceEnabled()) LOG.trace(s);
// sync unflushed WAL changes
// see HBASE-8208 for details
if (wal != null) {
try {
wal.sync(); // ensure that flush marker is sync'ed
} catch (IOException ioe) {
LOG.warn("Unexpected exception while wal.sync(), ignoring. Exception: "
+ StringUtils.stringifyException(ioe));
}
}
// wait for all in-progress transactions to commit to WAL before
// we can start the flush. This prevents
// uncommitted transactions from being written into HFiles.
// We have to block before we start the flush, otherwise keys that
      // were removed via a rollbackMemstore could be written to HFiles.
w.setWriteNumber(flushOpSeqId);
mvcc.waitForPreviousTransactionsComplete(w);
// set w to null to prevent mvcc.advanceMemstore from being called again inside finally block
w = null;
} finally {
if (w != null) {
// in case of failure just mark current w as complete
mvcc.advanceMemstore(w);
}
}
return new PrepareFlushResult(storeFlushCtxs, committedFiles, startTime, flushOpSeqId,
flushedSeqId, totalFlushableSizeOfFlushableStores);
}
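  // In summary: internalPrepareFlushCache (above) snapshots the selected
  // memstores under the updates write lock and writes a START_FLUSH marker,
  // while internalFlushCacheAndCommit (below) persists the snapshots as HFiles
  // and writes COMMIT_FLUSH, or ABORT_FLUSH if the commit fails.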
/**
   * Writes a marker to the WAL indicating a flush is requested but cannot be completed for
   * various reasons. Ignores exceptions from the WAL. Returns whether the write succeeded.
   * @param wal the WAL to write the marker to
   * @return whether the WAL write was successful
*/
private boolean writeFlushRequestMarkerToWAL(WAL wal, boolean writeFlushWalMarker) {
if (writeFlushWalMarker && wal != null && !writestate.readOnly) {
FlushDescriptor desc = ProtobufUtil.toFlushDescriptor(FlushAction.CANNOT_FLUSH,
getRegionInfo(), -1, new TreeMap<byte[], List<Path>>());
try {
WALUtil.writeFlushMarker(wal, this.htableDescriptor, getRegionInfo(),
desc, sequenceId, true);
return true;
} catch (IOException e) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Received exception while trying to write the flush request to wal", e);
}
}
return false;
}
protected FlushResult internalFlushCacheAndCommit(
final WAL wal, MonitoredTask status, final PrepareFlushResult prepareResult,
final Collection<Store> storesToFlush)
throws IOException {
// prepare flush context is carried via PrepareFlushResult
TreeMap<byte[], StoreFlushContext> storeFlushCtxs = prepareResult.storeFlushCtxs;
TreeMap<byte[], List<Path>> committedFiles = prepareResult.committedFiles;
long startTime = prepareResult.startTime;
long flushOpSeqId = prepareResult.flushOpSeqId;
long flushedSeqId = prepareResult.flushedSeqId;
long totalFlushableSizeOfFlushableStores = prepareResult.totalFlushableSize;
String s = "Flushing stores of " + this;
status.setStatus(s);
if (LOG.isTraceEnabled()) LOG.trace(s);
// Any failure from here on out will be catastrophic requiring server
// restart so wal content can be replayed and put back into the memstore.
    // Otherwise, the snapshot content, while backed up in the wal, will not
    // be part of the current running server's state.
boolean compactionRequested = false;
try {
// A. Flush memstore to all the HStores.
// Keep running vector of all store files that includes both old and the
// just-made new flush store file. The new flushed file is still in the
// tmp directory.
for (StoreFlushContext flush : storeFlushCtxs.values()) {
flush.flushCache(status);
}
// Switch snapshot (in memstore) -> new hfile (thus causing
// all the store scanners to reset/reseek).
Iterator<Store> it = storesToFlush.iterator();
      // storesToFlush and storeFlushCtxs have the same order
for (StoreFlushContext flush : storeFlushCtxs.values()) {
boolean needsCompaction = flush.commit(status);
if (needsCompaction) {
compactionRequested = true;
}
committedFiles.put(it.next().getFamily().getName(), flush.getCommittedFiles());
}
storeFlushCtxs.clear();
// Set down the memstore size by amount of flush.
this.addAndGetGlobalMemstoreSize(-totalFlushableSizeOfFlushableStores);
if (wal != null) {
// write flush marker to WAL. If fail, we should throw DroppedSnapshotException
FlushDescriptor desc = ProtobufUtil.toFlushDescriptor(FlushAction.COMMIT_FLUSH,
getRegionInfo(), flushOpSeqId, committedFiles);
WALUtil.writeFlushMarker(wal, this.htableDescriptor, getRegionInfo(),
desc, sequenceId, true);
}
} catch (Throwable t) {
// An exception here means that the snapshot was not persisted.
// The wal needs to be replayed so its content is restored to memstore.
// Currently, only a server restart will do this.
      // We used to only catch IOEs but it's possible that we'd get other
// exceptions -- e.g. HBASE-659 was about an NPE -- so now we catch
// all and sundry.
if (wal != null) {
try {
FlushDescriptor desc = ProtobufUtil.toFlushDescriptor(FlushAction.ABORT_FLUSH,
getRegionInfo(), flushOpSeqId, committedFiles);
WALUtil.writeFlushMarker(wal, this.htableDescriptor, getRegionInfo(),
desc, sequenceId, false);
} catch (Throwable ex) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Received unexpected exception trying to write ABORT_FLUSH marker to WAL:"
+ StringUtils.stringifyException(ex));
// ignore this since we will be aborting the RS with DSE.
}
wal.abortCacheFlush(this.getRegionInfo().getEncodedNameAsBytes());
}
DroppedSnapshotException dse = new DroppedSnapshotException("region: " +
Bytes.toStringBinary(getRegionInfo().getRegionName()));
dse.initCause(t);
status.abort("Flush failed: " + StringUtils.stringifyException(t));
throw dse;
}
// If we get to here, the HStores have been written.
if (wal != null) {
wal.completeCacheFlush(this.getRegionInfo().getEncodedNameAsBytes());
}
// Record latest flush time
for (Store store: storesToFlush) {
this.lastStoreFlushTimeMap.put(store, startTime);
}
// Update the oldest unflushed sequence id for region.
this.maxFlushedSeqId = flushedSeqId;
// Record flush operation sequence id.
this.lastFlushOpSeqId = flushOpSeqId;
// C. Finally notify anyone waiting on memstore to clear:
// e.g. checkResources().
synchronized (this) {
notifyAll(); // FindBugs NN_NAKED_NOTIFY
}
long time = EnvironmentEdgeManager.currentTime() - startTime;
long memstoresize = this.memstoreSize.get();
String msg = "Finished memstore flush of ~"
+ StringUtils.byteDesc(totalFlushableSizeOfFlushableStores) + "/"
+ totalFlushableSizeOfFlushableStores + ", currentsize="
+ StringUtils.byteDesc(memstoresize) + "/" + memstoresize
+ " for region " + this + " in " + time + "ms, sequenceid="
+ flushOpSeqId + ", compaction requested=" + compactionRequested
+ ((wal == null) ? "; wal=null" : "");
LOG.info(msg);
status.setStatus(msg);
return new FlushResultImpl(compactionRequested ?
FlushResult.Result.FLUSHED_COMPACTION_NEEDED :
FlushResult.Result.FLUSHED_NO_COMPACTION_NEEDED,
flushOpSeqId);
}
/**
* Method to safely get the next sequence number.
* @return Next sequence number unassociated with any actual edit.
* @throws IOException
*/
@VisibleForTesting
protected long getNextSequenceId(final WAL wal) throws IOException {
WALKey key = this.appendEmptyEdit(wal, null);
return key.getSequenceId();
}
//////////////////////////////////////////////////////////////////////////////
// get() methods for client use.
//////////////////////////////////////////////////////////////////////////////
@Override
public Result getClosestRowBefore(final byte [] row, final byte [] family) throws IOException {
if (coprocessorHost != null) {
Result result = new Result();
if (coprocessorHost.preGetClosestRowBefore(row, family, result)) {
return result;
}
}
// look across all the HStores for this region and determine what the
// closest key is across all column families, since the data may be sparse
checkRow(row, "getClosestRowBefore");
startRegionOperation(Operation.GET);
this.readRequestsCount.increment();
try {
Store store = getStore(family);
// get the closest key. (HStore.getRowKeyAtOrBefore can return null)
Cell key = store.getRowKeyAtOrBefore(row);
Result result = null;
if (key != null) {
Get get = new Get(CellUtil.cloneRow(key));
get.addFamily(family);
result = get(get);
}
if (coprocessorHost != null) {
coprocessorHost.postGetClosestRowBefore(row, family, result);
}
return result;
} finally {
closeRegionOperation(Operation.GET);
}
}
@Override
public RegionScanner getScanner(Scan scan) throws IOException {
return getScanner(scan, null);
}
protected RegionScanner getScanner(Scan scan,
List<KeyValueScanner> additionalScanners) throws IOException {
startRegionOperation(Operation.SCAN);
try {
// Verify families are all valid
if (!scan.hasFamilies()) {
// Adding all families to scanner
for (byte[] family: this.htableDescriptor.getFamiliesKeys()) {
scan.addFamily(family);
}
} else {
for (byte [] family : scan.getFamilyMap().keySet()) {
checkFamily(family);
}
}
return instantiateRegionScanner(scan, additionalScanners);
} finally {
closeRegionOperation(Operation.SCAN);
}
}
protected RegionScanner instantiateRegionScanner(Scan scan,
List<KeyValueScanner> additionalScanners) throws IOException {
if (scan.isReversed()) {
if (scan.getFilter() != null) {
scan.getFilter().setReversed(true);
}
return new ReversedRegionScannerImpl(scan, additionalScanners, this);
}
return new RegionScannerImpl(scan, additionalScanners, this);
}
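  // Illustrative client-side sketch of a reversed scan, which is routed to
  // ReversedRegionScannerImpl above ('table' is a hypothetical Table handle):
  //
  //   Scan scan = new Scan();
  //   scan.setReversed(true);
  //   try (ResultScanner rs = table.getScanner(scan)) {
  //     for (Result r : rs) {
  //       // rows arrive in descending row-key order
  //     }
  //   }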
@Override
public void prepareDelete(Delete delete) throws IOException {
    // Check to see if this is a whole-row delete
    if (delete.getFamilyCellMap().isEmpty()) {
      for (byte[] family : this.htableDescriptor.getFamiliesKeys()) {
        // Don't eat the timestamp
        delete.addFamily(family, delete.getTimeStamp());
      }
    } else {
      for (byte[] family : delete.getFamilyCellMap().keySet()) {
        if (family == null) {
          throw new NoSuchColumnFamilyException("Empty family is invalid");
        }
        checkFamily(family);
      }
    }
}
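  // Illustrative sketch: a Delete with an empty family map is expanded by
  // prepareDelete above into a whole-row delete ('region' and the row key are
  // hypothetical):
  //
  //   Delete wholeRow = new Delete(Bytes.toBytes("row1"));  // no families added
  //   region.delete(wholeRow);  // prepareDelete fills in every column family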
@Override
public void delete(Delete delete) throws IOException {
checkReadOnly();
checkResources();
startRegionOperation(Operation.DELETE);
try {
delete.getRow();
// All edits for the given row (across all column families) must happen atomically.
doBatchMutate(delete);
} finally {
closeRegionOperation(Operation.DELETE);
}
}
/**
   * Row key needed by the method below.
*/
private static final byte [] FOR_UNIT_TESTS_ONLY = Bytes.toBytes("ForUnitTestsOnly");
/**
* This is used only by unit tests. Not required to be a public API.
* @param familyMap map of family to edits for the given family.
* @throws IOException
*/
void delete(NavigableMap<byte[], List<Cell>> familyMap,
Durability durability) throws IOException {
Delete delete = new Delete(FOR_UNIT_TESTS_ONLY);
delete.setFamilyCellMap(familyMap);
delete.setDurability(durability);
doBatchMutate(delete);
}
@Override
public void prepareDeleteTimestamps(Mutation mutation, Map<byte[], List<Cell>> familyMap,
byte[] byteNow) throws IOException {
for (Map.Entry<byte[], List<Cell>> e : familyMap.entrySet()) {
byte[] family = e.getKey();
List<Cell> cells = e.getValue();
assert cells instanceof RandomAccess;
Map<byte[], Integer> kvCount = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
int listSize = cells.size();
for (int i=0; i < listSize; i++) {
Cell cell = cells.get(i);
// Check if time is LATEST, change to time of most recent addition if so
// This is expensive.
if (cell.getTimestamp() == HConstants.LATEST_TIMESTAMP && CellUtil.isDeleteType(cell)) {
byte[] qual = CellUtil.cloneQualifier(cell);
if (qual == null) qual = HConstants.EMPTY_BYTE_ARRAY;
Integer count = kvCount.get(qual);
if (count == null) {
kvCount.put(qual, 1);
} else {
kvCount.put(qual, count + 1);
}
count = kvCount.get(qual);
Get get = new Get(CellUtil.cloneRow(cell));
get.setMaxVersions(count);
get.addColumn(family, qual);
if (coprocessorHost != null) {
if (!coprocessorHost.prePrepareTimeStampForDeleteVersion(mutation, cell,
byteNow, get)) {
updateDeleteLatestVersionTimeStamp(cell, get, count, byteNow);
}
} else {
updateDeleteLatestVersionTimeStamp(cell, get, count, byteNow);
}
} else {
CellUtil.updateLatestStamp(cell, byteNow, 0);
}
}
}
}
void updateDeleteLatestVersionTimeStamp(Cell cell, Get get, int count, byte[] byteNow)
throws IOException {
List<Cell> result = get(get, false);
if (result.size() < count) {
// Nothing to delete
CellUtil.updateLatestStamp(cell, byteNow, 0);
return;
}
if (result.size() > count) {
throw new RuntimeException("Unexpected size: " + result.size());
}
Cell getCell = result.get(count - 1);
CellUtil.setTimestamp(cell, getCell.getTimestamp());
}
@Override
public void put(Put put) throws IOException {
checkReadOnly();
// Do a rough check that we have resources to accept a write. The check is
// 'rough' in that between the resource check and the call to obtain a
// read lock, resources may run out. For now, the thought is that this
// will be extremely rare; we'll deal with it when it happens.
checkResources();
startRegionOperation(Operation.PUT);
try {
// All edits for the given row (across all column families) must happen atomically.
doBatchMutate(put);
} finally {
closeRegionOperation(Operation.PUT);
}
}
/**
* Struct-like class that tracks the progress of a batch operation,
* accumulating status codes and tracking the index at which processing
* is proceeding.
*/
private abstract static class BatchOperationInProgress<T> {
T[] operations;
int nextIndexToProcess = 0;
OperationStatus[] retCodeDetails;
WALEdit[] walEditsFromCoprocessors;
public BatchOperationInProgress(T[] operations) {
this.operations = operations;
this.retCodeDetails = new OperationStatus[operations.length];
this.walEditsFromCoprocessors = new WALEdit[operations.length];
Arrays.fill(this.retCodeDetails, OperationStatus.NOT_RUN);
}
public abstract Mutation getMutation(int index);
public abstract long getNonceGroup(int index);
public abstract long getNonce(int index);
    /** This method is potentially expensive and should only be used for the non-replay CP path. */
public abstract Mutation[] getMutationsForCoprocs();
public abstract boolean isInReplay();
public abstract long getReplaySequenceId();
public boolean isDone() {
return nextIndexToProcess == operations.length;
}
}
private static class MutationBatch extends BatchOperationInProgress<Mutation> {
private long nonceGroup;
private long nonce;
public MutationBatch(Mutation[] operations, long nonceGroup, long nonce) {
super(operations);
this.nonceGroup = nonceGroup;
this.nonce = nonce;
}
@Override
public Mutation getMutation(int index) {
return this.operations[index];
}
@Override
public long getNonceGroup(int index) {
return nonceGroup;
}
@Override
public long getNonce(int index) {
return nonce;
}
@Override
public Mutation[] getMutationsForCoprocs() {
return this.operations;
}
@Override
public boolean isInReplay() {
return false;
}
@Override
public long getReplaySequenceId() {
return 0;
}
}
private static class ReplayBatch extends BatchOperationInProgress<MutationReplay> {
private long replaySeqId = 0;
public ReplayBatch(MutationReplay[] operations, long seqId) {
super(operations);
this.replaySeqId = seqId;
}
@Override
public Mutation getMutation(int index) {
return this.operations[index].mutation;
}
@Override
public long getNonceGroup(int index) {
return this.operations[index].nonceGroup;
}
@Override
public long getNonce(int index) {
return this.operations[index].nonce;
}
@Override
public Mutation[] getMutationsForCoprocs() {
assert false;
throw new RuntimeException("Should not be called for replay batch");
}
@Override
public boolean isInReplay() {
return true;
}
@Override
public long getReplaySequenceId() {
return this.replaySeqId;
}
}
@Override
public OperationStatus[] batchMutate(Mutation[] mutations, long nonceGroup, long nonce)
throws IOException {
    // As it stands, this is used for the following:
    // * batchMutate with a single mutation - put/delete, separate or from checkAndMutate.
    // * coprocessor calls (see e.g. BulkDeleteEndpoint).
// So nonces are not really ever used by HBase. They could be by coprocs, and checkAnd...
return batchMutate(new MutationBatch(mutations, nonceGroup, nonce));
}
public OperationStatus[] batchMutate(Mutation[] mutations) throws IOException {
return batchMutate(mutations, HConstants.NO_NONCE, HConstants.NO_NONCE);
}
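  // Illustrative use of the batch path ('region', 'put' and 'delete' are
  // hypothetical):
  //
  //   OperationStatus[] codes = region.batchMutate(new Mutation[] { put, delete });
  //   for (OperationStatus code : codes) {
  //     if (code.getOperationStatusCode() != OperationStatusCode.SUCCESS) {
  //       // inspect code.getExceptionMsg() and retry or surface the failure
  //     }
  //   }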
@Override
public OperationStatus[] batchReplay(MutationReplay[] mutations, long replaySeqId)
throws IOException {
if (!RegionReplicaUtil.isDefaultReplica(getRegionInfo())
&& replaySeqId < lastReplayedOpenRegionSeqId) {
// if it is a secondary replica we should ignore these entries silently
// since they are coming out of order
if (LOG.isTraceEnabled()) {
LOG.trace(getRegionInfo().getEncodedName() + " : "
+ "Skipping " + mutations.length + " mutations with replaySeqId=" + replaySeqId
+ " which is < than lastReplayedOpenRegionSeqId=" + lastReplayedOpenRegionSeqId);
for (MutationReplay mut : mutations) {
LOG.trace(getRegionInfo().getEncodedName() + " : Skipping : " + mut.mutation);
}
}
OperationStatus[] statuses = new OperationStatus[mutations.length];
for (int i = 0; i < statuses.length; i++) {
statuses[i] = OperationStatus.SUCCESS;
}
return statuses;
}
return batchMutate(new ReplayBatch(mutations, replaySeqId));
}
/**
* Perform a batch of mutations.
   * It supports only Put and Delete mutations; any other mutation type is rejected with a
   * FAILURE status rather than silently ignored.
* @param batchOp contains the list of mutations
* @return an array of OperationStatus which internally contains the
* OperationStatusCode and the exceptionMessage if any.
* @throws IOException
*/
OperationStatus[] batchMutate(BatchOperationInProgress<?> batchOp) throws IOException {
boolean initialized = false;
Operation op = batchOp.isInReplay() ? Operation.REPLAY_BATCH_MUTATE : Operation.BATCH_MUTATE;
startRegionOperation(op);
try {
while (!batchOp.isDone()) {
if (!batchOp.isInReplay()) {
checkReadOnly();
}
checkResources();
if (!initialized) {
this.writeRequestsCount.add(batchOp.operations.length);
if (!batchOp.isInReplay()) {
doPreMutationHook(batchOp);
}
initialized = true;
}
long addedSize = doMiniBatchMutation(batchOp);
long newSize = this.addAndGetGlobalMemstoreSize(addedSize);
if (isFlushSize(newSize)) {
requestFlush();
}
}
} finally {
closeRegionOperation(op);
}
return batchOp.retCodeDetails;
}
private void doPreMutationHook(BatchOperationInProgress<?> batchOp)
throws IOException {
/* Run coprocessor pre hook outside of locks to avoid deadlock */
WALEdit walEdit = new WALEdit();
if (coprocessorHost != null) {
for (int i = 0 ; i < batchOp.operations.length; i++) {
Mutation m = batchOp.getMutation(i);
if (m instanceof Put) {
if (coprocessorHost.prePut((Put) m, walEdit, m.getDurability())) {
// pre hook says skip this Put
// mark as success and skip in doMiniBatchMutation
batchOp.retCodeDetails[i] = OperationStatus.SUCCESS;
}
} else if (m instanceof Delete) {
Delete curDel = (Delete) m;
if (curDel.getFamilyCellMap().isEmpty()) {
// handle deleting a row case
prepareDelete(curDel);
}
if (coprocessorHost.preDelete(curDel, walEdit, m.getDurability())) {
// pre hook says skip this Delete
// mark as success and skip in doMiniBatchMutation
batchOp.retCodeDetails[i] = OperationStatus.SUCCESS;
}
} else {
          // If mutations other than Put/Delete (e.g. Append) are passed in batchMutate,
          // mark the operation return code as FAILURE so that the mutation will not be
          // considered in doMiniBatchMutation
batchOp.retCodeDetails[i] = new OperationStatus(OperationStatusCode.FAILURE,
"Put/Delete mutations only supported in batchMutate() now");
}
if (!walEdit.isEmpty()) {
batchOp.walEditsFromCoprocessors[i] = walEdit;
walEdit = new WALEdit();
}
}
}
}
@SuppressWarnings("unchecked")
private long doMiniBatchMutation(BatchOperationInProgress<?> batchOp) throws IOException {
boolean isInReplay = batchOp.isInReplay();
// variable to note if all Put items are for the same CF -- metrics related
boolean putsCfSetConsistent = true;
    // The set of columnFamilies first seen for Put.
    Set<byte[]> putsCfSet = null;
    // variable to note if all Delete items are for the same CF -- metrics related
    boolean deletesCfSetConsistent = true;
    // The set of columnFamilies first seen for Delete.
Set<byte[]> deletesCfSet = null;
long currentNonceGroup = HConstants.NO_NONCE, currentNonce = HConstants.NO_NONCE;
WALEdit walEdit = new WALEdit(isInReplay);
MultiVersionConsistencyControl.WriteEntry w = null;
long txid = 0;
boolean doRollBackMemstore = false;
boolean locked = false;
/** Keep track of the locks we hold so we can release them in finally clause */
List<RowLock> acquiredRowLocks = Lists.newArrayListWithCapacity(batchOp.operations.length);
// reference family maps directly so coprocessors can mutate them if desired
Map<byte[], List<Cell>>[] familyMaps = new Map[batchOp.operations.length];
List<Cell> memstoreCells = new ArrayList<Cell>();
// We try to set up a batch in the range [firstIndex,lastIndexExclusive)
int firstIndex = batchOp.nextIndexToProcess;
int lastIndexExclusive = firstIndex;
boolean success = false;
int noOfPuts = 0, noOfDeletes = 0;
WALKey walKey = null;
long mvccNum = 0;
try {
// ------------------------------------
// STEP 1. Try to acquire as many locks as we can, and ensure
// we acquire at least one.
// ----------------------------------
int numReadyToWrite = 0;
long now = EnvironmentEdgeManager.currentTime();
while (lastIndexExclusive < batchOp.operations.length) {
Mutation mutation = batchOp.getMutation(lastIndexExclusive);
boolean isPutMutation = mutation instanceof Put;
Map<byte[], List<Cell>> familyMap = mutation.getFamilyCellMap();
// store the family map reference to allow for mutations
familyMaps[lastIndexExclusive] = familyMap;
// skip anything that "ran" already
if (batchOp.retCodeDetails[lastIndexExclusive].getOperationStatusCode()
!= OperationStatusCode.NOT_RUN) {
lastIndexExclusive++;
continue;
}
try {
if (isPutMutation) {
// Check the families in the put. If bad, skip this one.
if (isInReplay) {
removeNonExistentColumnFamilyForReplay(familyMap);
} else {
checkFamilies(familyMap.keySet());
}
checkTimestamps(mutation.getFamilyCellMap(), now);
} else {
prepareDelete((Delete) mutation);
}
} catch (NoSuchColumnFamilyException nscf) {
LOG.warn("No such column family in batch mutation", nscf);
batchOp.retCodeDetails[lastIndexExclusive] = new OperationStatus(
OperationStatusCode.BAD_FAMILY, nscf.getMessage());
lastIndexExclusive++;
continue;
} catch (FailedSanityCheckException fsce) {
LOG.warn("Batch Mutation did not pass sanity check", fsce);
batchOp.retCodeDetails[lastIndexExclusive] = new OperationStatus(
OperationStatusCode.SANITY_CHECK_FAILURE, fsce.getMessage());
lastIndexExclusive++;
continue;
}
// If we haven't got any rows in our batch, we should block to
// get the next one.
boolean shouldBlock = numReadyToWrite == 0;
RowLock rowLock = null;
try {
rowLock = getRowLockInternal(mutation.getRow(), shouldBlock);
} catch (IOException ioe) {
LOG.warn("Failed getting lock in batch put, row="
+ Bytes.toStringBinary(mutation.getRow()), ioe);
}
if (rowLock == null) {
// We failed to grab another lock
assert !shouldBlock : "Should never fail to get lock when blocking";
break; // stop acquiring more rows for this batch
} else {
acquiredRowLocks.add(rowLock);
}
lastIndexExclusive++;
numReadyToWrite++;
if (isPutMutation) {
          // If Column Families stay consistent throughout all of the
          // individual puts then metrics can be reported as a multiput across
          // column families in the first put.
if (putsCfSet == null) {
putsCfSet = mutation.getFamilyCellMap().keySet();
} else {
putsCfSetConsistent = putsCfSetConsistent
&& mutation.getFamilyCellMap().keySet().equals(putsCfSet);
}
} else {
if (deletesCfSet == null) {
deletesCfSet = mutation.getFamilyCellMap().keySet();
} else {
deletesCfSetConsistent = deletesCfSetConsistent
&& mutation.getFamilyCellMap().keySet().equals(deletesCfSet);
}
}
}
// we should record the timestamp only after we have acquired the rowLock,
// otherwise, newer puts/deletes are not guaranteed to have a newer timestamp
now = EnvironmentEdgeManager.currentTime();
byte[] byteNow = Bytes.toBytes(now);
// Nothing to put/delete -- an exception in the above such as NoSuchColumnFamily?
if (numReadyToWrite <= 0) return 0L;
// We've now grabbed as many mutations off the list as we can
// ------------------------------------
// STEP 2. Update any LATEST_TIMESTAMP timestamps
// ----------------------------------
for (int i = firstIndex; !isInReplay && i < lastIndexExclusive; i++) {
// skip invalid
if (batchOp.retCodeDetails[i].getOperationStatusCode()
!= OperationStatusCode.NOT_RUN) continue;
Mutation mutation = batchOp.getMutation(i);
if (mutation instanceof Put) {
updateCellTimestamps(familyMaps[i].values(), byteNow);
noOfPuts++;
} else {
prepareDeleteTimestamps(mutation, familyMaps[i], byteNow);
noOfDeletes++;
}
rewriteCellTags(familyMaps[i], mutation);
}
lock(this.updatesLock.readLock(), numReadyToWrite);
locked = true;
      if (isInReplay) {
mvccNum = batchOp.getReplaySequenceId();
} else {
mvccNum = MultiVersionConsistencyControl.getPreAssignedWriteNumber(this.sequenceId);
}
      // ------------------------------------
      // Acquire the latest mvcc number
      // ----------------------------------
w = mvcc.beginMemstoreInsertWithSeqNum(mvccNum);
// calling the pre CP hook for batch mutation
if (!isInReplay && coprocessorHost != null) {
MiniBatchOperationInProgress<Mutation> miniBatchOp =
new MiniBatchOperationInProgress<Mutation>(batchOp.getMutationsForCoprocs(),
batchOp.retCodeDetails, batchOp.walEditsFromCoprocessors, firstIndex, lastIndexExclusive);
if (coprocessorHost.preBatchMutate(miniBatchOp)) return 0L;
}
// ------------------------------------
// STEP 3. Write back to memstore
// Write to memstore. It is ok to write to memstore
// first without updating the WAL because we do not roll
// forward the memstore MVCC. The MVCC will be moved up when
// the complete operation is done. These changes are not yet
// visible to scanners till we update the MVCC. The MVCC is
// moved only when the sync is complete.
// ----------------------------------
long addedSize = 0;
for (int i = firstIndex; i < lastIndexExclusive; i++) {
if (batchOp.retCodeDetails[i].getOperationStatusCode()
!= OperationStatusCode.NOT_RUN) {
continue;
}
doRollBackMemstore = true; // If we have a failure, we need to clean what we wrote
addedSize += applyFamilyMapToMemstore(familyMaps[i], mvccNum, memstoreCells, isInReplay);
}
// ------------------------------------
// STEP 4. Build WAL edit
// ----------------------------------
Durability durability = Durability.USE_DEFAULT;
for (int i = firstIndex; i < lastIndexExclusive; i++) {
// Skip puts that were determined to be invalid during preprocessing
if (batchOp.retCodeDetails[i].getOperationStatusCode()
!= OperationStatusCode.NOT_RUN) {
continue;
}
batchOp.retCodeDetails[i] = OperationStatus.SUCCESS;
Mutation m = batchOp.getMutation(i);
Durability tmpDur = getEffectiveDurability(m.getDurability());
if (tmpDur.ordinal() > durability.ordinal()) {
durability = tmpDur;
}
if (tmpDur == Durability.SKIP_WAL) {
recordMutationWithoutWal(m.getFamilyCellMap());
continue;
}
long nonceGroup = batchOp.getNonceGroup(i), nonce = batchOp.getNonce(i);
// In replay, the batch may contain multiple nonces. If so, write WALEdit for each.
// Given how nonces are originally written, these should be contiguous.
        // They don't have to be; it will still work, just writing more WALEdits than needed.
if (nonceGroup != currentNonceGroup || nonce != currentNonce) {
if (walEdit.size() > 0) {
assert isInReplay;
if (!isInReplay) {
throw new IOException("Multiple nonces per batch and not in replay");
}
// txid should always increase, so having the one from the last call is ok.
// we use HLogKey here instead of WALKey directly to support legacy coprocessors.
walKey = new ReplayHLogKey(this.getRegionInfo().getEncodedNameAsBytes(),
this.htableDescriptor.getTableName(), now, m.getClusterIds(),
currentNonceGroup, currentNonce);
txid = this.wal.append(this.htableDescriptor, this.getRegionInfo(), walKey,
walEdit, getSequenceId(), true, null);
walEdit = new WALEdit(isInReplay);
walKey = null;
}
currentNonceGroup = nonceGroup;
currentNonce = nonce;
}
// Add WAL edits by CP
WALEdit fromCP = batchOp.walEditsFromCoprocessors[i];
if (fromCP != null) {
for (Cell cell : fromCP.getCells()) {
walEdit.add(cell);
}
}
addFamilyMapToWALEdit(familyMaps[i], walEdit);
}
// -------------------------
// STEP 5. Append the final edit to WAL. Do not sync wal.
// -------------------------
Mutation mutation = batchOp.getMutation(firstIndex);
if (isInReplay) {
// use wal key from the original
walKey = new ReplayHLogKey(this.getRegionInfo().getEncodedNameAsBytes(),
this.htableDescriptor.getTableName(), WALKey.NO_SEQUENCE_ID, now,
mutation.getClusterIds(), currentNonceGroup, currentNonce);
long replaySeqId = batchOp.getReplaySequenceId();
walKey.setOrigLogSeqNum(replaySeqId);
// ensure that the sequence id of the region is at least as big as orig log seq id
while (true) {
long seqId = getSequenceId().get();
if (seqId >= replaySeqId) break;
if (getSequenceId().compareAndSet(seqId, replaySeqId)) break;
}
}
if (walEdit.size() > 0) {
if (!isInReplay) {
// we use HLogKey here instead of WALKey directly to support legacy coprocessors.
walKey = new HLogKey(this.getRegionInfo().getEncodedNameAsBytes(),
this.htableDescriptor.getTableName(), WALKey.NO_SEQUENCE_ID, now,
mutation.getClusterIds(), currentNonceGroup, currentNonce);
}
txid = this.wal.append(this.htableDescriptor, this.getRegionInfo(), walKey, walEdit,
getSequenceId(), true, memstoreCells);
}
      if (walKey == null) {
// Append a faked WALEdit in order for SKIP_WAL updates to get mvcc assigned
walKey = this.appendEmptyEdit(this.wal, memstoreCells);
}
// -------------------------------
// STEP 6. Release row locks, etc.
// -------------------------------
if (locked) {
this.updatesLock.readLock().unlock();
locked = false;
}
releaseRowLocks(acquiredRowLocks);
// -------------------------
// STEP 7. Sync wal.
// -------------------------
if (txid != 0) {
syncOrDefer(txid, durability);
}
doRollBackMemstore = false;
// calling the post CP hook for batch mutation
if (!isInReplay && coprocessorHost != null) {
MiniBatchOperationInProgress<Mutation> miniBatchOp =
new MiniBatchOperationInProgress<Mutation>(batchOp.getMutationsForCoprocs(),
batchOp.retCodeDetails, batchOp.walEditsFromCoprocessors, firstIndex, lastIndexExclusive);
coprocessorHost.postBatchMutate(miniBatchOp);
}
// ------------------------------------------------------------------
// STEP 8. Advance mvcc. This will make this put visible to scanners and getters.
// ------------------------------------------------------------------
if (w != null) {
mvcc.completeMemstoreInsertWithSeqNum(w, walKey);
w = null;
}
// ------------------------------------
// STEP 9. Run coprocessor post hooks. This should be done after the wal is
// synced so that the coprocessor contract is adhered to.
// ------------------------------------
if (!isInReplay && coprocessorHost != null) {
for (int i = firstIndex; i < lastIndexExclusive; i++) {
// only for successful puts
if (batchOp.retCodeDetails[i].getOperationStatusCode()
!= OperationStatusCode.SUCCESS) {
continue;
}
Mutation m = batchOp.getMutation(i);
if (m instanceof Put) {
coprocessorHost.postPut((Put) m, walEdit, m.getDurability());
} else {
coprocessorHost.postDelete((Delete) m, walEdit, m.getDurability());
}
}
}
success = true;
return addedSize;
} finally {
// if the wal sync was unsuccessful, remove keys from memstore
if (doRollBackMemstore) {
rollbackMemstore(memstoreCells);
}
if (w != null) {
mvcc.completeMemstoreInsertWithSeqNum(w, walKey);
}
if (locked) {
this.updatesLock.readLock().unlock();
}
releaseRowLocks(acquiredRowLocks);
// See if the column families were consistent through the whole thing.
      // If they were then keep them. If they were not then pass a null.
      // A null will be treated as unknown.
      // The total time taken may cover both Puts and Deletes; split it between
      // puts and deletes based on how many of each the batch contained.
if (noOfPuts > 0) {
// There were some Puts in the batch.
if (this.metricsRegion != null) {
this.metricsRegion.updatePut();
}
}
if (noOfDeletes > 0) {
// There were some Deletes in the batch.
if (this.metricsRegion != null) {
this.metricsRegion.updateDelete();
}
}
if (!success) {
for (int i = firstIndex; i < lastIndexExclusive; i++) {
if (batchOp.retCodeDetails[i].getOperationStatusCode() == OperationStatusCode.NOT_RUN) {
batchOp.retCodeDetails[i] = OperationStatus.FAILURE;
}
}
}
if (coprocessorHost != null && !batchOp.isInReplay()) {
// call the coprocessor hook to do any finalization steps
// after the put is done
MiniBatchOperationInProgress<Mutation> miniBatchOp =
new MiniBatchOperationInProgress<Mutation>(batchOp.getMutationsForCoprocs(),
batchOp.retCodeDetails, batchOp.walEditsFromCoprocessors, firstIndex,
lastIndexExclusive);
coprocessorHost.postBatchMutateIndispensably(miniBatchOp, success);
}
batchOp.nextIndexToProcess = lastIndexExclusive;
}
}
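  // The numbered steps above give the write path its guarantees: row locks and
  // the updates read lock bracket the memstore writes (steps 1-3), the WAL
  // append and sync make the batch durable (steps 4, 5 and 7), and only the
  // final mvcc advance (step 8) makes the new cells visible to readers.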
/**
* Returns effective durability from the passed durability and
* the table descriptor.
*/
protected Durability getEffectiveDurability(Durability d) {
return d == Durability.USE_DEFAULT ? this.durability : d;
}
  // TODO: gets/puts and deletes should be refactored a bit so that the row lock
  // is acquired up front and then passed into the methods; in the case of
  // checkAndMutate you could then just do lockRow, get, put, unlockRow or
  // something similar.
@Override
public boolean checkAndMutate(byte [] row, byte [] family, byte [] qualifier,
CompareOp compareOp, ByteArrayComparable comparator, Mutation w,
boolean writeToWAL)
throws IOException{
checkReadOnly();
    // TODO: add a check for value length, or maybe even better, move this to the
    // client if this becomes a global setting
checkResources();
boolean isPut = w instanceof Put;
if (!isPut && !(w instanceof Delete))
throw new org.apache.hadoop.hbase.DoNotRetryIOException("Action must " +
"be Put or Delete");
if (!Bytes.equals(row, w.getRow())) {
throw new org.apache.hadoop.hbase.DoNotRetryIOException("Action's " +
"getRow must match the passed row");
}
startRegionOperation();
try {
Get get = new Get(row);
checkFamily(family);
get.addColumn(family, qualifier);
// Lock row - note that doBatchMutate will relock this row if called
RowLock rowLock = getRowLock(get.getRow());
// wait for all previous transactions to complete (with lock held)
mvcc.waitForPreviousTransactionsComplete();
try {
if (this.getCoprocessorHost() != null) {
Boolean processed = null;
if (w instanceof Put) {
processed = this.getCoprocessorHost().preCheckAndPutAfterRowLock(row, family,
qualifier, compareOp, comparator, (Put) w);
} else if (w instanceof Delete) {
processed = this.getCoprocessorHost().preCheckAndDeleteAfterRowLock(row, family,
qualifier, compareOp, comparator, (Delete) w);
}
if (processed != null) {
return processed;
}
}
List<Cell> result = get(get, false);
boolean valueIsNull = comparator.getValue() == null ||
comparator.getValue().length == 0;
boolean matches = false;
if (result.size() == 0 && valueIsNull) {
matches = true;
} else if (result.size() > 0 && result.get(0).getValueLength() == 0 &&
valueIsNull) {
matches = true;
} else if (result.size() == 1 && !valueIsNull) {
Cell kv = result.get(0);
int compareResult = comparator.compareTo(kv.getValueArray(),
kv.getValueOffset(), kv.getValueLength());
switch (compareOp) {
case LESS:
matches = compareResult < 0;
break;
case LESS_OR_EQUAL:
matches = compareResult <= 0;
break;
case EQUAL:
matches = compareResult == 0;
break;
case NOT_EQUAL:
matches = compareResult != 0;
break;
case GREATER_OR_EQUAL:
matches = compareResult >= 0;
break;
case GREATER:
matches = compareResult > 0;
break;
default:
throw new RuntimeException("Unknown Compare op " + compareOp.name());
}
}
        // If the condition matches, apply the new put or the new delete
if (matches) {
// All edits for the given row (across all column families) must
// happen atomically.
doBatchMutate(w);
this.checkAndMutateChecksPassed.increment();
return true;
}
this.checkAndMutateChecksFailed.increment();
return false;
} finally {
rowLock.release();
}
} finally {
closeRegionOperation();
}
}
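  // Editorial usage sketch (not part of the original source); assumes "region"
  // is this HRegion and uses the standard BinaryComparator. The Put is applied
  // atomically only if the currently stored value equals expectedValue:
  //
  //   Put put = new Put(row);
  //   put.addColumn(family, qualifier, newValue);
  //   boolean applied = region.checkAndMutate(row, family, qualifier,
  //       CompareOp.EQUAL, new BinaryComparator(expectedValue), put, true);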
  //TODO: gets/puts and deletes should be refactored a bit so that acquiring
  //the lock happens up front and the lock is simply passed into these methods.
  //Then, in the case of checkAndMutate, you could just do lockRow,
  //get, put, unlockRow or something similar
@Override
public boolean checkAndRowMutate(byte [] row, byte [] family, byte [] qualifier,
CompareOp compareOp, ByteArrayComparable comparator, RowMutations rm,
boolean writeToWAL) throws IOException {
checkReadOnly();
//TODO, add check for value length or maybe even better move this to the
//client if this becomes a global setting
checkResources();
startRegionOperation();
try {
Get get = new Get(row);
checkFamily(family);
get.addColumn(family, qualifier);
// Lock row - note that doBatchMutate will relock this row if called
RowLock rowLock = getRowLock(get.getRow());
// wait for all previous transactions to complete (with lock held)
mvcc.waitForPreviousTransactionsComplete();
try {
List<Cell> result = get(get, false);
boolean valueIsNull = comparator.getValue() == null ||
comparator.getValue().length == 0;
boolean matches = false;
if (result.size() == 0 && valueIsNull) {
matches = true;
} else if (result.size() > 0 && result.get(0).getValueLength() == 0 &&
valueIsNull) {
matches = true;
} else if (result.size() == 1 && !valueIsNull) {
Cell kv = result.get(0);
int compareResult = comparator.compareTo(kv.getValueArray(),
kv.getValueOffset(), kv.getValueLength());
switch (compareOp) {
case LESS:
matches = compareResult < 0;
break;
case LESS_OR_EQUAL:
matches = compareResult <= 0;
break;
case EQUAL:
matches = compareResult == 0;
break;
case NOT_EQUAL:
matches = compareResult != 0;
break;
case GREATER_OR_EQUAL:
matches = compareResult >= 0;
break;
case GREATER:
matches = compareResult > 0;
break;
default:
throw new RuntimeException("Unknown Compare op " + compareOp.name());
}
}
        // If it matches, apply the new Put or Delete
if (matches) {
// All edits for the given row (across all column families) must
// happen atomically.
mutateRow(rm);
this.checkAndMutateChecksPassed.increment();
return true;
}
this.checkAndMutateChecksFailed.increment();
return false;
} finally {
rowLock.release();
}
} finally {
closeRegionOperation();
}
}
private void doBatchMutate(Mutation mutation) throws IOException {
// Currently this is only called for puts and deletes, so no nonces.
OperationStatus[] batchMutate = this.batchMutate(new Mutation[] { mutation });
if (batchMutate[0].getOperationStatusCode().equals(OperationStatusCode.SANITY_CHECK_FAILURE)) {
throw new FailedSanityCheckException(batchMutate[0].getExceptionMsg());
} else if (batchMutate[0].getOperationStatusCode().equals(OperationStatusCode.BAD_FAMILY)) {
throw new NoSuchColumnFamilyException(batchMutate[0].getExceptionMsg());
}
}
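  // Editorial note: doBatchMutate surfaces per-mutation failures (failed sanity
  // check, unknown column family) as exceptions, so single-mutation callers such
  // as put() and checkAndMutate() keep their throwing contract.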
/**
* Complete taking the snapshot on the region. Writes the region info and adds references to the
* working snapshot directory.
*
   * TODO: for API consistency, consider adding another version with no {@link ForeignExceptionSnare}
* arg. (In the future other cancellable HRegion methods could eventually add a
* {@link ForeignExceptionSnare}, or we could do something fancier).
*
* @param desc snapshot description object
* @param exnSnare ForeignExceptionSnare that captures external exceptions in case we need to
* bail out. This is allowed to be null and will just be ignored in that case.
* @throws IOException if there is an external or internal error causing the snapshot to fail
*/
public void addRegionToSnapshot(SnapshotDescription desc,
ForeignExceptionSnare exnSnare) throws IOException {
Path rootDir = FSUtils.getRootDir(conf);
Path snapshotDir = SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir);
SnapshotManifest manifest = SnapshotManifest.create(conf, getFilesystem(),
snapshotDir, desc, exnSnare);
manifest.addRegion(this);
}
@Override
public void updateCellTimestamps(final Iterable<List<Cell>> cellItr, final byte[] now)
throws IOException {
for (List<Cell> cells: cellItr) {
if (cells == null) continue;
assert cells instanceof RandomAccess;
int listSize = cells.size();
for (int i = 0; i < listSize; i++) {
CellUtil.updateLatestStamp(cells.get(i), now, 0);
}
}
}
/**
* Possibly rewrite incoming cell tags.
*/
void rewriteCellTags(Map<byte[], List<Cell>> familyMap, final Mutation m) {
// Check if we have any work to do and early out otherwise
// Update these checks as more logic is added here
if (m.getTTL() == Long.MAX_VALUE) {
return;
}
// From this point we know we have some work to do
for (Map.Entry<byte[], List<Cell>> e: familyMap.entrySet()) {
List<Cell> cells = e.getValue();
assert cells instanceof RandomAccess;
int listSize = cells.size();
for (int i = 0; i < listSize; i++) {
Cell cell = cells.get(i);
List<Tag> newTags = new ArrayList<Tag>();
Iterator<Tag> tagIterator = CellUtil.tagsIterator(cell.getTagsArray(),
cell.getTagsOffset(), cell.getTagsLength());
// Carry forward existing tags
while (tagIterator.hasNext()) {
// Add any filters or tag specific rewrites here
newTags.add(tagIterator.next());
}
// Cell TTL handling
// Check again if we need to add a cell TTL because early out logic
// above may change when there are more tag based features in core.
if (m.getTTL() != Long.MAX_VALUE) {
// Add a cell TTL tag
newTags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(m.getTTL())));
}
// Rewrite the cell with the updated set of tags
cells.set(i, new KeyValue(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),
cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),
cell.getTimestamp(), KeyValue.Type.codeToType(cell.getTypeByte()),
cell.getValueArray(), cell.getValueOffset(), cell.getValueLength(),
newTags));
}
}
}
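  // Editorial sketch (not from the original source): a mutation-level TTL set by
  // the client is carried into every cell as a tag, e.g.
  //   Put p = new Put(row);
  //   p.setTTL(60000L); // 1 minute; rewriteCellTags appends a TTL_TAG_TYPE tag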
/*
   * Check if resources are available to support an update.
   *
   * We throw RegionTooBusyException if we are above the memstore limit
   * and expect the client to retry using some kind of backoff
*/
private void checkResources() throws RegionTooBusyException {
// If catalog region, do not impose resource constraints or block updates.
if (this.getRegionInfo().isMetaRegion()) return;
if (this.memstoreSize.get() > this.blockingMemStoreSize) {
blockedRequestsCount.increment();
requestFlush();
throw new RegionTooBusyException("Above memstore limit, " +
"regionName=" + (this.getRegionInfo() == null ? "unknown" :
this.getRegionInfo().getRegionNameAsString()) +
", server=" + (this.getRegionServerServices() == null ? "unknown" :
this.getRegionServerServices().getServerName()) +
", memstoreSize=" + memstoreSize.get() +
", blockingMemStoreSize=" + blockingMemStoreSize);
}
}
/**
* @throws IOException Throws exception if region is in read-only mode.
*/
protected void checkReadOnly() throws IOException {
if (isReadOnly()) {
throw new IOException("region is read only");
}
}
protected void checkReadsEnabled() throws IOException {
if (!this.writestate.readsEnabled) {
throw new IOException(getRegionInfo().getEncodedName()
+ ": The region's reads are disabled. Cannot serve the request");
}
}
public void setReadsEnabled(boolean readsEnabled) {
if (readsEnabled && !this.writestate.readsEnabled) {
LOG.info(getRegionInfo().getEncodedName() + " : Enabling reads for region.");
}
this.writestate.setReadsEnabled(readsEnabled);
}
/**
* Add updates first to the wal and then add values to memstore.
* Warning: Assumption is caller has lock on passed in row.
* @param edits Cell updates by column
* @throws IOException
*/
private void put(final byte [] row, byte [] family, List<Cell> edits)
throws IOException {
NavigableMap<byte[], List<Cell>> familyMap;
familyMap = new TreeMap<byte[], List<Cell>>(Bytes.BYTES_COMPARATOR);
familyMap.put(family, edits);
Put p = new Put(row);
p.setFamilyCellMap(familyMap);
doBatchMutate(p);
}
/**
* Atomically apply the given map of family->edits to the memstore.
* This handles the consistency control on its own, but the caller
* should already have locked updatesLock.readLock(). This also does
* <b>not</b> check the families for validity.
*
* @param familyMap Map of kvs per family
   * @param mvccNum the MVCC write number to set as the sequence id on newly added cells
   * @param memstoreCells list that collects the cells actually added to the memstore
* @param isInReplay true when adding replayed KVs into memstore
* @return the additional memory usage of the memstore caused by the
* new entries.
* @throws IOException
*/
private long applyFamilyMapToMemstore(Map<byte[], List<Cell>> familyMap,
long mvccNum, List<Cell> memstoreCells, boolean isInReplay) throws IOException {
long size = 0;
for (Map.Entry<byte[], List<Cell>> e : familyMap.entrySet()) {
byte[] family = e.getKey();
List<Cell> cells = e.getValue();
assert cells instanceof RandomAccess;
Store store = getStore(family);
int listSize = cells.size();
for (int i=0; i < listSize; i++) {
Cell cell = cells.get(i);
CellUtil.setSequenceId(cell, mvccNum);
Pair<Long, Cell> ret = store.add(cell);
size += ret.getFirst();
memstoreCells.add(ret.getSecond());
        if (isInReplay) {
// set memstore newly added cells with replay mvcc number
CellUtil.setSequenceId(ret.getSecond(), mvccNum);
}
}
}
return size;
}
/**
* Remove all the keys listed in the map from the memstore. This method is
* called when a Put/Delete has updated memstore but subsequently fails to update
* the wal. This method is then invoked to rollback the memstore.
*/
private void rollbackMemstore(List<Cell> memstoreCells) {
int kvsRolledback = 0;
for (Cell cell : memstoreCells) {
byte[] family = CellUtil.cloneFamily(cell);
Store store = getStore(family);
store.rollback(cell);
kvsRolledback++;
}
LOG.debug("rollbackMemstore rolled back " + kvsRolledback);
}
@Override
public void checkFamilies(Collection<byte[]> families) throws NoSuchColumnFamilyException {
for (byte[] family : families) {
checkFamily(family);
}
}
/**
   * During replay, there may exist column families that were removed between the region
   * server failure and the replay
*/
private void removeNonExistentColumnFamilyForReplay(
final Map<byte[], List<Cell>> familyMap) {
List<byte[]> nonExistentList = null;
for (byte[] family : familyMap.keySet()) {
if (!this.htableDescriptor.hasFamily(family)) {
if (nonExistentList == null) {
nonExistentList = new ArrayList<byte[]>();
}
nonExistentList.add(family);
}
}
if (nonExistentList != null) {
for (byte[] family : nonExistentList) {
// Perhaps schema was changed between crash and replay
LOG.info("No family for " + Bytes.toString(family) + " omit from reply.");
familyMap.remove(family);
}
}
}
@Override
public void checkTimestamps(final Map<byte[], List<Cell>> familyMap, long now)
throws FailedSanityCheckException {
if (timestampSlop == HConstants.LATEST_TIMESTAMP) {
return;
}
long maxTs = now + timestampSlop;
for (List<Cell> kvs : familyMap.values()) {
assert kvs instanceof RandomAccess;
int listSize = kvs.size();
for (int i=0; i < listSize; i++) {
Cell cell = kvs.get(i);
        // see if the user-side TS is out of range. LATEST_TIMESTAMP means use server-side time.
long ts = cell.getTimestamp();
if (ts != HConstants.LATEST_TIMESTAMP && ts > maxTs) {
throw new FailedSanityCheckException("Timestamp for KV out of range "
+ cell + " (too.new=" + timestampSlop + ")");
}
}
}
}
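  // Editorial example (not from the original source): with a configured
  // timestampSlop of 2000 ms, a cell carrying a user-supplied timestamp more
  // than two seconds ahead of the server's "now" fails this sanity check.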
/**
* Append the given map of family->edits to a WALEdit data structure.
* This does not write to the WAL itself.
* @param familyMap map of family->edits
* @param walEdit the destination entry to append into
*/
private void addFamilyMapToWALEdit(Map<byte[], List<Cell>> familyMap,
WALEdit walEdit) {
for (List<Cell> edits : familyMap.values()) {
assert edits instanceof RandomAccess;
int listSize = edits.size();
for (int i=0; i < listSize; i++) {
Cell cell = edits.get(i);
walEdit.add(cell);
}
}
}
private void requestFlush() {
if (this.rsServices == null) {
return;
}
synchronized (writestate) {
if (this.writestate.isFlushRequested()) {
return;
}
writestate.flushRequested = true;
}
// Make request outside of synchronize block; HBASE-818.
this.rsServices.getFlushRequester().requestFlush(this, false);
if (LOG.isDebugEnabled()) {
LOG.debug("Flush requested on " + this);
}
}
/*
   * @param size memstore size to check, in bytes
* @return True if size is over the flush threshold
*/
private boolean isFlushSize(final long size) {
return size > this.memstoreFlushSize;
}
/**
* Read the edits put under this region by wal splitting process. Put
* the recovered edits back up into this region.
*
* <p>We can ignore any wal message that has a sequence ID that's equal to or
* lower than minSeqId. (Because we know such messages are already
* reflected in the HFiles.)
*
* <p>While this is running we are putting pressure on memory yet we are
* outside of our usual accounting because we are not yet an onlined region
* (this stuff is being run as part of Region initialization). This means
* that if we're up against global memory limits, we'll not be flagged to flush
* because we are not online. We can't be flushed by usual mechanisms anyways;
* we're not yet online so our relative sequenceids are not yet aligned with
* WAL sequenceids -- not till we come up online, post processing of split
* edits.
*
   * <p>But to help relieve memory pressure, at least manage our own heap size
   * flushing if we are in excess of per-region limits. Flushing, though, we have
   * to be careful about, and avoid using the regionserver/wal sequenceid. It's running
   * on a different timeline from what's going on in here in this region context, so if we
   * crashed replaying these edits, but in the midst had a flush that used the
   * regionserver wal with a sequenceid in excess of what's going on in here
   * in this region and with its split editlogs, then we could miss edits the
   * next time we go to recover. So, we have to flush inline, using seqids that
   * make sense in this single-region context only -- until we are online.
*
   * @param maxSeqIdInStores Any edit found in the split editlogs needs to be in excess of
   * the maxSeqId for the store to be applied, else it's skipped.
   * @return the sequence id of the last edit added to this region out of the
   * recovered edits log, or the minimum sequence id across the stores if nothing was added.
* @throws IOException
*/
protected long replayRecoveredEditsIfAny(final Path regiondir,
Map<byte[], Long> maxSeqIdInStores,
final CancelableProgressable reporter, final MonitoredTask status)
throws IOException {
long minSeqIdForTheRegion = -1;
for (Long maxSeqIdInStore : maxSeqIdInStores.values()) {
if (maxSeqIdInStore < minSeqIdForTheRegion || minSeqIdForTheRegion == -1) {
minSeqIdForTheRegion = maxSeqIdInStore;
}
}
long seqid = minSeqIdForTheRegion;
FileSystem fs = this.fs.getFileSystem();
NavigableSet<Path> files = WALSplitter.getSplitEditFilesSorted(fs, regiondir);
if (LOG.isDebugEnabled()) {
LOG.debug("Found " + (files == null ? 0 : files.size())
+ " recovered edits file(s) under " + regiondir);
}
if (files == null || files.isEmpty()) return seqid;
for (Path edits: files) {
if (edits == null || !fs.exists(edits)) {
LOG.warn("Null or non-existent edits file: " + edits);
continue;
}
if (isZeroLengthThenDelete(fs, edits)) continue;
long maxSeqId;
String fileName = edits.getName();
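      // A recovered.edits file is named after the maximum sequence id it may contain.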
maxSeqId = Math.abs(Long.parseLong(fileName));
if (maxSeqId <= minSeqIdForTheRegion) {
if (LOG.isDebugEnabled()) {
String msg = "Maximum sequenceid for this wal is " + maxSeqId
+ " and minimum sequenceid for the region is " + minSeqIdForTheRegion
+ ", skipped the whole file, path=" + edits;
LOG.debug(msg);
}
continue;
}
try {
// replay the edits. Replay can return -1 if everything is skipped, only update
// if seqId is greater
seqid = Math.max(seqid, replayRecoveredEdits(edits, maxSeqIdInStores, reporter));
} catch (IOException e) {
boolean skipErrors = conf.getBoolean(
HConstants.HREGION_EDITS_REPLAY_SKIP_ERRORS,
conf.getBoolean(
"hbase.skip.errors",
HConstants.DEFAULT_HREGION_EDITS_REPLAY_SKIP_ERRORS));
if (conf.get("hbase.skip.errors") != null) {
LOG.warn(
"The property 'hbase.skip.errors' has been deprecated. Please use " +
HConstants.HREGION_EDITS_REPLAY_SKIP_ERRORS + " instead.");
}
if (skipErrors) {
Path p = WALSplitter.moveAsideBadEditsFile(fs, edits);
LOG.error(HConstants.HREGION_EDITS_REPLAY_SKIP_ERRORS
+ "=true so continuing. Renamed " + edits +
" as " + p, e);
} else {
throw e;
}
}
}
    // The edits size added into rsAccounting during this replay will not
    // be required any more. So just clear it.
if (this.rsAccounting != null) {
this.rsAccounting.clearRegionReplayEditsSize(getRegionInfo().getRegionName());
}
if (seqid > minSeqIdForTheRegion) {
// Then we added some edits to memory. Flush and cleanup split edit files.
internalFlushcache(null, seqid, stores.values(), status, false);
}
// Now delete the content of recovered edits. We're done w/ them.
if (files.size() > 0 && this.conf.getBoolean("hbase.region.archive.recovered.edits", false)) {
// For debugging data loss issues!
// If this flag is set, make use of the hfile archiving by making recovered.edits a fake
// column family. Have to fake out file type too by casting our recovered.edits as storefiles
String fakeFamilyName = WALSplitter.getRegionDirRecoveredEditsDir(regiondir).getName();
Set<StoreFile> fakeStoreFiles = new HashSet<StoreFile>(files.size());
for (Path file: files) {
fakeStoreFiles.add(new StoreFile(getRegionFileSystem().getFileSystem(), file, this.conf,
null, null));
}
getRegionFileSystem().removeStoreFiles(fakeFamilyName, fakeStoreFiles);
} else {
for (Path file: files) {
if (!fs.delete(file, false)) {
LOG.error("Failed delete of " + file);
} else {
LOG.debug("Deleted recovered.edits file=" + file);
}
}
}
return seqid;
}
/*
* @param edits File of recovered edits.
* @param maxSeqIdInStores Maximum sequenceid found in each store. Edits in wal
* must be larger than this to be replayed for each store.
   * @param reporter CancelableProgressable used to report replay progress
   * @return the sequence id of the last edit added to this region out of the
   * recovered edits log, or -1 if nothing was added from the editlogs.
* @throws IOException
*/
private long replayRecoveredEdits(final Path edits,
Map<byte[], Long> maxSeqIdInStores, final CancelableProgressable reporter)
throws IOException {
String msg = "Replaying edits from " + edits;
LOG.info(msg);
MonitoredTask status = TaskMonitor.get().createStatus(msg);
FileSystem fs = this.fs.getFileSystem();
status.setStatus("Opening recovered edits");
WAL.Reader reader = null;
try {
reader = WALFactory.createReader(fs, edits, conf);
long currentEditSeqId = -1;
long currentReplaySeqId = -1;
long firstSeqIdInLog = -1;
long skippedEdits = 0;
long editsCount = 0;
long intervalEdits = 0;
WAL.Entry entry;
Store store = null;
      boolean reportedOnce = false;
ServerNonceManager ng = this.rsServices == null ? null : this.rsServices.getNonceManager();
try {
// How many edits seen before we check elapsed time
int interval = this.conf.getInt("hbase.hstore.report.interval.edits", 2000);
// How often to send a progress report (default 1/2 master timeout)
int period = this.conf.getInt("hbase.hstore.report.period", 300000);
long lastReport = EnvironmentEdgeManager.currentTime();
while ((entry = reader.next()) != null) {
WALKey key = entry.getKey();
WALEdit val = entry.getEdit();
          if (ng != null) { // ng is null in some tests, or when nonces are disabled
ng.reportOperationFromWal(key.getNonceGroup(), key.getNonce(), key.getWriteTime());
}
if (reporter != null) {
intervalEdits += val.size();
if (intervalEdits >= interval) {
// Number of edits interval reached
intervalEdits = 0;
long cur = EnvironmentEdgeManager.currentTime();
if (lastReport + period <= cur) {
status.setStatus("Replaying edits..." +
" skipped=" + skippedEdits +
" edits=" + editsCount);
// Timeout reached
                if (!reporter.progress()) {
msg = "Progressable reporter failed, stopping replay";
LOG.warn(msg);
status.abort(msg);
throw new IOException(msg);
}
                reportedOnce = true;
lastReport = cur;
}
}
}
if (firstSeqIdInLog == -1) {
firstSeqIdInLog = key.getLogSeqNum();
}
if (currentEditSeqId > key.getLogSeqNum()) {
// when this condition is true, it means we have a serious defect because we need to
// maintain increasing SeqId for WAL edits per region
LOG.error(getRegionInfo().getEncodedName() + " : "
+ "Found decreasing SeqId. PreId=" + currentEditSeqId + " key=" + key
+ "; edit=" + val);
} else {
currentEditSeqId = key.getLogSeqNum();
}
currentReplaySeqId = (key.getOrigLogSeqNum() > 0) ?
key.getOrigLogSeqNum() : currentEditSeqId;
// Start coprocessor replay here. The coprocessor is for each WALEdit
// instead of a KeyValue.
if (coprocessorHost != null) {
status.setStatus("Running pre-WAL-restore hook in coprocessors");
if (coprocessorHost.preWALRestore(this.getRegionInfo(), key, val)) {
// if bypass this wal entry, ignore it ...
continue;
}
}
// Check this edit is for this region.
if (!Bytes.equals(key.getEncodedRegionName(),
this.getRegionInfo().getEncodedNameAsBytes())) {
skippedEdits++;
continue;
}
boolean flush = false;
for (Cell cell: val.getCells()) {
// Check this edit is for me. Also, guard against writing the special
// METACOLUMN info such as HBASE::CACHEFLUSH entries
if (CellUtil.matchingFamily(cell, WALEdit.METAFAMILY)) {
//this is a special edit, we should handle it
CompactionDescriptor compaction = WALEdit.getCompaction(cell);
if (compaction != null) {
//replay the compaction
replayWALCompactionMarker(compaction, false, true, Long.MAX_VALUE);
}
skippedEdits++;
continue;
}
// Figure which store the edit is meant for.
if (store == null || !CellUtil.matchingFamily(cell, store.getFamily().getName())) {
store = getStore(cell);
}
if (store == null) {
// This should never happen. Perhaps schema was changed between
// crash and redeploy?
LOG.warn("No family for " + cell);
skippedEdits++;
continue;
}
// Now, figure if we should skip this edit.
if (key.getLogSeqNum() <= maxSeqIdInStores.get(store.getFamily()
.getName())) {
skippedEdits++;
continue;
}
CellUtil.setSequenceId(cell, currentReplaySeqId);
// Once we are over the limit, restoreEdit will keep returning true to
// flush -- but don't flush until we've played all the kvs that make up
// the WALEdit.
flush |= restoreEdit(store, cell);
editsCount++;
}
if (flush) {
internalFlushcache(null, currentEditSeqId, stores.values(), status, false);
}
if (coprocessorHost != null) {
coprocessorHost.postWALRestore(this.getRegionInfo(), key, val);
}
}
} catch (EOFException eof) {
Path p = WALSplitter.moveAsideBadEditsFile(fs, edits);
msg = "Encountered EOF. Most likely due to Master failure during " +
"wal splitting, so we have this data in another edit. " +
"Continuing, but renaming " + edits + " as " + p;
LOG.warn(msg, eof);
status.abort(msg);
} catch (IOException ioe) {
// If the IOE resulted from bad file format,
// then this problem is idempotent and retrying won't help
if (ioe.getCause() instanceof ParseException) {
Path p = WALSplitter.moveAsideBadEditsFile(fs, edits);
msg = "File corruption encountered! " +
"Continuing, but renaming " + edits + " as " + p;
LOG.warn(msg, ioe);
status.setStatus(msg);
} else {
status.abort(StringUtils.stringifyException(ioe));
// other IO errors may be transient (bad network connection,
// checksum exception on one datanode, etc). throw & retry
throw ioe;
}
}
      if (reporter != null && !reportedOnce) {
reporter.progress();
}
msg = "Applied " + editsCount + ", skipped " + skippedEdits +
", firstSequenceIdInLog=" + firstSeqIdInLog +
", maxSequenceIdInLog=" + currentEditSeqId + ", path=" + edits;
status.markComplete(msg);
LOG.debug(msg);
return currentEditSeqId;
} finally {
status.cleanup();
if (reader != null) {
reader.close();
}
}
}
/**
   * Call to complete a compaction. It's for the case where we find in the WAL a compaction
   * that was not finished. We could find one when recovering a WAL after a regionserver crash.
* See HBASE-2331.
*/
void replayWALCompactionMarker(CompactionDescriptor compaction, boolean pickCompactionFiles,
boolean removeFiles, long replaySeqId)
throws IOException {
checkTargetRegion(compaction.getEncodedRegionName().toByteArray(),
"Compaction marker from WAL ", compaction);
synchronized (writestate) {
if (replaySeqId < lastReplayedOpenRegionSeqId) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Skipping replaying compaction event :" + TextFormat.shortDebugString(compaction)
+ " because its sequence id " + replaySeqId + " is smaller than this regions "
+ "lastReplayedOpenRegionSeqId of " + lastReplayedOpenRegionSeqId);
return;
}
if (replaySeqId < lastReplayedCompactionSeqId) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Skipping replaying compaction event :" + TextFormat.shortDebugString(compaction)
+ " because its sequence id " + replaySeqId + " is smaller than this regions "
+ "lastReplayedCompactionSeqId of " + lastReplayedCompactionSeqId);
return;
} else {
lastReplayedCompactionSeqId = replaySeqId;
}
if (LOG.isDebugEnabled()) {
LOG.debug(getRegionInfo().getEncodedName() + " : "
+ "Replaying compaction marker " + TextFormat.shortDebugString(compaction)
+ " with seqId=" + replaySeqId + " and lastReplayedOpenRegionSeqId="
+ lastReplayedOpenRegionSeqId);
}
startRegionOperation(Operation.REPLAY_EVENT);
try {
Store store = this.getStore(compaction.getFamilyName().toByteArray());
if (store == null) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Found Compaction WAL edit for deleted family:"
+ Bytes.toString(compaction.getFamilyName().toByteArray()));
return;
}
store.replayCompactionMarker(compaction, pickCompactionFiles, removeFiles);
logRegionFiles();
} catch (FileNotFoundException ex) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "At least one of the store files in compaction: "
+ TextFormat.shortDebugString(compaction)
+ " doesn't exist any more. Skip loading the file(s)", ex);
} finally {
closeRegionOperation(Operation.REPLAY_EVENT);
}
}
}
void replayWALFlushMarker(FlushDescriptor flush, long replaySeqId) throws IOException {
checkTargetRegion(flush.getEncodedRegionName().toByteArray(),
"Flush marker from WAL ", flush);
if (ServerRegionReplicaUtil.isDefaultReplica(this.getRegionInfo())) {
return; // if primary nothing to do
}
if (LOG.isDebugEnabled()) {
LOG.debug(getRegionInfo().getEncodedName() + " : "
+ "Replaying flush marker " + TextFormat.shortDebugString(flush));
}
startRegionOperation(Operation.REPLAY_EVENT); // use region close lock to guard against close
try {
FlushAction action = flush.getAction();
switch (action) {
case START_FLUSH:
replayWALFlushStartMarker(flush);
break;
case COMMIT_FLUSH:
replayWALFlushCommitMarker(flush);
break;
case ABORT_FLUSH:
replayWALFlushAbortMarker(flush);
break;
case CANNOT_FLUSH:
replayWALFlushCannotFlushMarker(flush, replaySeqId);
break;
default:
LOG.warn(getRegionInfo().getEncodedName() + " : " +
"Received a flush event with unknown action, ignoring. " +
TextFormat.shortDebugString(flush));
break;
}
logRegionFiles();
} finally {
closeRegionOperation(Operation.REPLAY_EVENT);
}
}
/** Replay the flush marker from primary region by creating a corresponding snapshot of
* the store memstores, only if the memstores do not have a higher seqId from an earlier wal
* edit (because the events may be coming out of order).
*/
@VisibleForTesting
PrepareFlushResult replayWALFlushStartMarker(FlushDescriptor flush) throws IOException {
long flushSeqId = flush.getFlushSequenceNumber();
HashSet<Store> storesToFlush = new HashSet<Store>();
for (StoreFlushDescriptor storeFlush : flush.getStoreFlushesList()) {
byte[] family = storeFlush.getFamilyName().toByteArray();
Store store = getStore(family);
if (store == null) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Received a flush start marker from primary, but the family is not found. Ignoring"
+ " StoreFlushDescriptor:" + TextFormat.shortDebugString(storeFlush));
continue;
}
storesToFlush.add(store);
}
MonitoredTask status = TaskMonitor.get().createStatus("Preparing flush " + this);
// we will use writestate as a coarse-grain lock for all the replay events
// (flush, compaction, region open etc)
synchronized (writestate) {
try {
if (flush.getFlushSequenceNumber() < lastReplayedOpenRegionSeqId) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Skipping replaying flush event :" + TextFormat.shortDebugString(flush)
+ " because its sequence id is smaller than this regions lastReplayedOpenRegionSeqId "
+ " of " + lastReplayedOpenRegionSeqId);
return null;
}
if (numMutationsWithoutWAL.get() > 0) {
numMutationsWithoutWAL.set(0);
dataInMemoryWithoutWAL.set(0);
}
if (!writestate.flushing) {
          // we do not have an active snapshot and a corresponding this.prepareFlushResult. This
          // means we can just snapshot our memstores and continue as normal.
          // invoke internalPrepareFlushCache. Send null as wal since we do not want the flush
          // events in the wal
PrepareFlushResult prepareResult = internalPrepareFlushCache(null,
flushSeqId, storesToFlush, status, false);
if (prepareResult.result == null) {
// save the PrepareFlushResult so that we can use it later from commit flush
this.writestate.flushing = true;
this.prepareFlushResult = prepareResult;
status.markComplete("Flush prepare successful");
if (LOG.isDebugEnabled()) {
LOG.debug(getRegionInfo().getEncodedName() + " : "
+ " Prepared flush with seqId:" + flush.getFlushSequenceNumber());
}
} else {
// special case empty memstore. We will still save the flush result in this case, since
            // our memstore is empty, but the primary is still flushing
if (prepareResult.getResult().getResult() ==
FlushResult.Result.CANNOT_FLUSH_MEMSTORE_EMPTY) {
this.writestate.flushing = true;
this.prepareFlushResult = prepareResult;
if (LOG.isDebugEnabled()) {
LOG.debug(getRegionInfo().getEncodedName() + " : "
+ " Prepared empty flush with seqId:" + flush.getFlushSequenceNumber());
}
}
status.abort("Flush prepare failed with " + prepareResult.result);
            // nothing much to do. prepare flush failed for some reason.
}
return prepareResult;
} else {
// we already have an active snapshot.
if (flush.getFlushSequenceNumber() == this.prepareFlushResult.flushOpSeqId) {
// They define the same flush. Log and continue.
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Received a flush prepare marker with the same seqId: " +
+ flush.getFlushSequenceNumber() + " before clearing the previous one with seqId: "
+ prepareFlushResult.flushOpSeqId + ". Ignoring");
// ignore
} else if (flush.getFlushSequenceNumber() < this.prepareFlushResult.flushOpSeqId) {
// We received a flush with a smaller seqNum than what we have prepared. We can only
// ignore this prepare flush request.
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Received a flush prepare marker with a smaller seqId: " +
+ flush.getFlushSequenceNumber() + " before clearing the previous one with seqId: "
+ prepareFlushResult.flushOpSeqId + ". Ignoring");
// ignore
} else {
// We received a flush with a larger seqNum than what we have prepared
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Received a flush prepare marker with a larger seqId: " +
+ flush.getFlushSequenceNumber() + " before clearing the previous one with seqId: "
+ prepareFlushResult.flushOpSeqId + ". Ignoring");
// We do not have multiple active snapshots in the memstore or a way to merge current
// memstore snapshot with the contents and resnapshot for now. We cannot take
// another snapshot and drop the previous one because that will cause temporary
// data loss in the secondary. So we ignore this for now, deferring the resolution
// to happen when we see the corresponding flush commit marker. If we have a memstore
// snapshot with x, and later received another prepare snapshot with y (where x < y),
// when we see flush commit for y, we will drop snapshot for x, and can also drop all
// the memstore edits if everything in memstore is < y. This is the usual case for
            // RS crash + recovery where we might see consecutive prepare flush wal markers.
            // Otherwise, this will cause more memory to be used in the secondary replica until a
            // further prepare + commit flush is seen and replayed.
}
}
} finally {
status.cleanup();
writestate.notifyAll();
}
}
return null;
}
@VisibleForTesting
void replayWALFlushCommitMarker(FlushDescriptor flush) throws IOException {
MonitoredTask status = TaskMonitor.get().createStatus("Committing flush " + this);
    // check whether we have the memstore snapshot with the corresponding seqId. Replays to
    // secondary region replicas arrive in order, except when the region moves or the
    // region server crashes. In those cases, we may receive replay requests out of order from
// the original seqIds.
synchronized (writestate) {
try {
if (flush.getFlushSequenceNumber() < lastReplayedOpenRegionSeqId) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Skipping replaying flush event :" + TextFormat.shortDebugString(flush)
+ " because its sequence id is smaller than this regions lastReplayedOpenRegionSeqId "
+ " of " + lastReplayedOpenRegionSeqId);
return;
}
if (writestate.flushing) {
PrepareFlushResult prepareFlushResult = this.prepareFlushResult;
if (flush.getFlushSequenceNumber() == prepareFlushResult.flushOpSeqId) {
if (LOG.isDebugEnabled()) {
LOG.debug(getRegionInfo().getEncodedName() + " : "
+ "Received a flush commit marker with seqId:" + flush.getFlushSequenceNumber()
+ " and a previous prepared snapshot was found");
}
// This is the regular case where we received commit flush after prepare flush
// corresponding to the same seqId.
replayFlushInStores(flush, prepareFlushResult, true);
// Set down the memstore size by amount of flush.
this.addAndGetGlobalMemstoreSize(-prepareFlushResult.totalFlushableSize);
this.prepareFlushResult = null;
writestate.flushing = false;
} else if (flush.getFlushSequenceNumber() < prepareFlushResult.flushOpSeqId) {
          // This should not happen normally. However, let's be safe and guard against these cases
// we received a flush commit with a smaller seqId than what we have prepared
// we will pick the flush file up from this commit (if we have not seen it), but we
// will not drop the memstore
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Received a flush commit marker with smaller seqId: "
+ flush.getFlushSequenceNumber() + " than what we have prepared with seqId: "
+ prepareFlushResult.flushOpSeqId + ". Picking up new file, but not dropping"
+" prepared memstore snapshot");
replayFlushInStores(flush, prepareFlushResult, false);
// snapshot is not dropped, so memstore sizes should not be decremented
// we still have the prepared snapshot, flushing should still be true
} else {
          // This should not happen normally. However, let's be safe and guard against these cases
// we received a flush commit with a larger seqId than what we have prepared
// we will pick the flush file for this. We will also obtain the updates lock and
// look for contents of the memstore to see whether we have edits after this seqId.
// If not, we will drop all the memstore edits and the snapshot as well.
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Received a flush commit marker with larger seqId: "
+ flush.getFlushSequenceNumber() + " than what we have prepared with seqId: " +
prepareFlushResult.flushOpSeqId + ". Picking up new file and dropping prepared"
+" memstore snapshot");
replayFlushInStores(flush, prepareFlushResult, true);
// Set down the memstore size by amount of flush.
this.addAndGetGlobalMemstoreSize(-prepareFlushResult.totalFlushableSize);
// Inspect the memstore contents to see whether the memstore contains only edits
// with seqId smaller than the flush seqId. If so, we can discard those edits.
dropMemstoreContentsForSeqId(flush.getFlushSequenceNumber(), null);
this.prepareFlushResult = null;
writestate.flushing = false;
}
        // If we were waiting to observe a flush or region opening event, so as not to show
        // partial data after a secondary region crash, we can allow reads now. We can only make
        // sure that we are not showing partial data (for example skipping some previous edits)
        // until we observe a full flush start and flush commit. So if we were not able to find
        // a previous flush we will not enable reads now.
this.setReadsEnabled(true);
} else {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Received a flush commit marker with seqId:" + flush.getFlushSequenceNumber()
+ ", but no previous prepared snapshot was found");
// There is no corresponding prepare snapshot from before.
// We will pick up the new flushed file
replayFlushInStores(flush, null, false);
// Inspect the memstore contents to see whether the memstore contains only edits
// with seqId smaller than the flush seqId. If so, we can discard those edits.
dropMemstoreContentsForSeqId(flush.getFlushSequenceNumber(), null);
}
status.markComplete("Flush commit successful");
// Update the last flushed sequence id for region.
this.maxFlushedSeqId = flush.getFlushSequenceNumber();
// advance the mvcc read point so that the new flushed file is visible.
// there may be some in-flight transactions, but they won't be made visible since they are
// either greater than flush seq number or they were already dropped via flush.
// TODO: If we are using FlushAllStoresPolicy, then this can make edits visible from other
// stores while they are still in flight because the flush commit marker will not contain
// flushes from ALL stores.
getMVCC().advanceMemstoreReadPointIfNeeded(flush.getFlushSequenceNumber());
} catch (FileNotFoundException ex) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "At least one of the store files in flush: " + TextFormat.shortDebugString(flush)
+ " doesn't exist any more. Skip loading the file(s)", ex);
}
finally {
status.cleanup();
writestate.notifyAll();
}
}
// C. Finally notify anyone waiting on memstore to clear:
// e.g. checkResources().
synchronized (this) {
notifyAll(); // FindBugs NN_NAKED_NOTIFY
}
}
/**
* Replays the given flush descriptor by opening the flush files in stores and dropping the
* memstore snapshots if requested.
   * @param flush the flush descriptor
   * @param prepareFlushResult the result of a previously replayed prepare flush, or null if none
   * @param dropMemstoreSnapshot whether to drop the prepared memstore snapshot
   * @throws IOException
*/
private void replayFlushInStores(FlushDescriptor flush, PrepareFlushResult prepareFlushResult,
boolean dropMemstoreSnapshot)
throws IOException {
for (StoreFlushDescriptor storeFlush : flush.getStoreFlushesList()) {
byte[] family = storeFlush.getFamilyName().toByteArray();
Store store = getStore(family);
if (store == null) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Received a flush commit marker from primary, but the family is not found."
+ "Ignoring StoreFlushDescriptor:" + storeFlush);
continue;
}
List<String> flushFiles = storeFlush.getFlushOutputList();
StoreFlushContext ctx = null;
long startTime = EnvironmentEdgeManager.currentTime();
if (prepareFlushResult == null || prepareFlushResult.storeFlushCtxs == null) {
ctx = store.createFlushContext(flush.getFlushSequenceNumber());
} else {
ctx = prepareFlushResult.storeFlushCtxs.get(family);
startTime = prepareFlushResult.startTime;
}
if (ctx == null) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Unexpected: flush commit marker received from store "
+ Bytes.toString(family) + " but no associated flush context. Ignoring");
continue;
}
ctx.replayFlush(flushFiles, dropMemstoreSnapshot); // replay the flush
// Record latest flush time
this.lastStoreFlushTimeMap.put(store, startTime);
}
}
/**
   * Drops the memstore contents after replaying a flush descriptor or region open event
   * if the memstore edits have seqNums smaller than the given seq id.
   * @param seqId the sequence id up to which memstore contents can be dropped
   * @param store the store whose memstore to drop, or null to consider all stores
   * @return the total size freed from the memstore(s)
   * @throws IOException
*/
private long dropMemstoreContentsForSeqId(long seqId, Store store) throws IOException {
long totalFreedSize = 0;
this.updatesLock.writeLock().lock();
try {
mvcc.waitForPreviousTransactionsComplete();
long currentSeqId = getSequenceId().get();
if (seqId >= currentSeqId) {
// then we can drop the memstore contents since everything is below this seqId
LOG.info(getRegionInfo().getEncodedName() + " : "
+ "Dropping memstore contents as well since replayed flush seqId: "
+ seqId + " is greater than current seqId:" + currentSeqId);
// Prepare flush (take a snapshot) and then abort (drop the snapshot)
        if (store == null) {
for (Store s : stores.values()) {
totalFreedSize += doDropStoreMemstoreContentsForSeqId(s, currentSeqId);
}
} else {
totalFreedSize += doDropStoreMemstoreContentsForSeqId(store, currentSeqId);
}
} else {
LOG.info(getRegionInfo().getEncodedName() + " : "
+ "Not dropping memstore contents since replayed flush seqId: "
+ seqId + " is smaller than current seqId:" + currentSeqId);
}
} finally {
this.updatesLock.writeLock().unlock();
}
return totalFreedSize;
}
private long doDropStoreMemstoreContentsForSeqId(Store s, long currentSeqId) throws IOException {
long snapshotSize = s.getFlushableSize();
this.addAndGetGlobalMemstoreSize(-snapshotSize);
StoreFlushContext ctx = s.createFlushContext(currentSeqId);
ctx.prepare();
ctx.abort();
return snapshotSize;
}
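  // Editorial note: dropping is implemented above as a prepare-then-abort of a
  // flush context, which snapshots the memstore and then discards the snapshot.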
private void replayWALFlushAbortMarker(FlushDescriptor flush) {
// nothing to do for now. A flush abort will cause a RS abort which means that the region
// will be opened somewhere else later. We will see the region open event soon, and replaying
// that will drop the snapshot
}
private void replayWALFlushCannotFlushMarker(FlushDescriptor flush, long replaySeqId) {
synchronized (writestate) {
if (this.lastReplayedOpenRegionSeqId > replaySeqId) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Skipping replaying flush event :" + TextFormat.shortDebugString(flush)
+ " because its sequence id " + replaySeqId + " is smaller than this regions "
+ "lastReplayedOpenRegionSeqId of " + lastReplayedOpenRegionSeqId);
return;
}
      // If we were waiting to observe a flush or region opening event, so as not to show
      // partial data after a secondary region crash, we can allow reads now. This event means
      // that the primary was not able to flush because the memstore was empty when we requested
      // a flush. By the time we observe this, we are guaranteed to have an up to date seqId with
      // our previous assignment.
this.setReadsEnabled(true);
}
}
@VisibleForTesting
PrepareFlushResult getPrepareFlushResult() {
return prepareFlushResult;
}
void replayWALRegionEventMarker(RegionEventDescriptor regionEvent) throws IOException {
checkTargetRegion(regionEvent.getEncodedRegionName().toByteArray(),
"RegionEvent marker from WAL ", regionEvent);
startRegionOperation(Operation.REPLAY_EVENT);
try {
if (ServerRegionReplicaUtil.isDefaultReplica(this.getRegionInfo())) {
return; // if primary nothing to do
}
if (regionEvent.getEventType() == EventType.REGION_CLOSE) {
// nothing to do on REGION_CLOSE for now.
return;
}
if (regionEvent.getEventType() != EventType.REGION_OPEN) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Unknown region event received, ignoring :"
+ TextFormat.shortDebugString(regionEvent));
return;
}
if (LOG.isDebugEnabled()) {
LOG.debug(getRegionInfo().getEncodedName() + " : "
+ "Replaying region open event marker " + TextFormat.shortDebugString(regionEvent));
}
// we will use writestate as a coarse-grain lock for all the replay events
synchronized (writestate) {
// Replication can deliver events out of order when primary region moves or the region
// server crashes, since there is no coordination between replication of different wal files
      // belonging to different region servers. We have to safeguard against this case by using
// region open event's seqid. Since this is the first event that the region puts (after
// possibly flushing recovered.edits), after seeing this event, we can ignore every edit
// smaller than this seqId
if (this.lastReplayedOpenRegionSeqId <= regionEvent.getLogSequenceNumber()) {
this.lastReplayedOpenRegionSeqId = regionEvent.getLogSequenceNumber();
} else {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Skipping replaying region event :" + TextFormat.shortDebugString(regionEvent)
+ " because its sequence id is smaller than this regions lastReplayedOpenRegionSeqId "
+ " of " + lastReplayedOpenRegionSeqId);
return;
}
      // The region open event lists all the files that the region has at the time of opening.
      // Just pick up all the files, and drop prepared flushes and memstore contents
for (StoreDescriptor storeDescriptor : regionEvent.getStoresList()) {
// stores of primary may be different now
byte[] family = storeDescriptor.getFamilyName().toByteArray();
Store store = getStore(family);
if (store == null) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Received a region open marker from primary, but the family is not found. "
+ "Ignoring. StoreDescriptor:" + storeDescriptor);
continue;
}
long storeSeqId = store.getMaxSequenceId();
List<String> storeFiles = storeDescriptor.getStoreFileList();
try {
store.refreshStoreFiles(storeFiles); // replace the files with the new ones
} catch (FileNotFoundException ex) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "At least one of the store files: " + storeFiles
+ " doesn't exist any more. Skip loading the file(s)", ex);
continue;
}
if (store.getMaxSequenceId() != storeSeqId) {
// Record latest flush time if we picked up new files
lastStoreFlushTimeMap.put(store, EnvironmentEdgeManager.currentTime());
}
if (writestate.flushing) {
// only drop memstore snapshots if they are smaller than last flush for the store
if (this.prepareFlushResult.flushOpSeqId <= regionEvent.getLogSequenceNumber()) {
StoreFlushContext ctx = this.prepareFlushResult.storeFlushCtxs == null ?
null : this.prepareFlushResult.storeFlushCtxs.get(family);
if (ctx != null) {
long snapshotSize = store.getFlushableSize();
ctx.abort();
this.addAndGetGlobalMemstoreSize(-snapshotSize);
this.prepareFlushResult.storeFlushCtxs.remove(family);
}
}
}
// Drop the memstore contents if they are now smaller than the latest seen flushed file
dropMemstoreContentsForSeqId(regionEvent.getLogSequenceNumber(), store);
if (storeSeqId > this.maxFlushedSeqId) {
this.maxFlushedSeqId = storeSeqId;
}
}
// if all stores ended up dropping their snapshots, we can safely drop the
// prepareFlushResult
dropPrepareFlushIfPossible();
// advance the mvcc read point so that the new flushed file is visible.
// there may be some in-flight transactions, but they won't be made visible since they are
// either greater than flush seq number or they were already dropped via flush.
getMVCC().advanceMemstoreReadPointIfNeeded(this.maxFlushedSeqId);
      // If we were waiting to observe a flush or region opening event, so as not to show
      // partial data after a secondary region crash, we can allow reads now.
this.setReadsEnabled(true);
// C. Finally notify anyone waiting on memstore to clear:
// e.g. checkResources().
synchronized (this) {
notifyAll(); // FindBugs NN_NAKED_NOTIFY
}
}
logRegionFiles();
} finally {
closeRegionOperation(Operation.REPLAY_EVENT);
}
}
void replayWALBulkLoadEventMarker(WALProtos.BulkLoadDescriptor bulkLoadEvent) throws IOException {
checkTargetRegion(bulkLoadEvent.getEncodedRegionName().toByteArray(),
"BulkLoad marker from WAL ", bulkLoadEvent);
if (ServerRegionReplicaUtil.isDefaultReplica(this.getRegionInfo())) {
return; // if primary nothing to do
}
if (LOG.isDebugEnabled()) {
LOG.debug(getRegionInfo().getEncodedName() + " : "
+ "Replaying bulkload event marker " + TextFormat.shortDebugString(bulkLoadEvent));
}
// check if multiple families involved
boolean multipleFamilies = false;
byte[] family = null;
for (StoreDescriptor storeDescriptor : bulkLoadEvent.getStoresList()) {
byte[] fam = storeDescriptor.getFamilyName().toByteArray();
if (family == null) {
family = fam;
} else if (!Bytes.equals(family, fam)) {
multipleFamilies = true;
break;
}
}
startBulkRegionOperation(multipleFamilies);
try {
// we will use writestate as a coarse-grain lock for all the replay events
synchronized (writestate) {
// Replication can deliver events out of order when primary region moves or the region
// server crashes, since there is no coordination between replication of different wal files
        // belonging to different region servers. We have to safeguard against this case by using
// region open event's seqid. Since this is the first event that the region puts (after
// possibly flushing recovered.edits), after seeing this event, we can ignore every edit
// smaller than this seqId
if (bulkLoadEvent.getBulkloadSeqNum() >= 0
&& this.lastReplayedOpenRegionSeqId >= bulkLoadEvent.getBulkloadSeqNum()) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Skipping replaying bulkload event :"
+ TextFormat.shortDebugString(bulkLoadEvent)
+ " because its sequence id is smaller than this region's lastReplayedOpenRegionSeqId"
+ " =" + lastReplayedOpenRegionSeqId);
return;
}
for (StoreDescriptor storeDescriptor : bulkLoadEvent.getStoresList()) {
// stores of primary may be different now
family = storeDescriptor.getFamilyName().toByteArray();
Store store = getStore(family);
if (store == null) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ "Received a bulk load marker from primary, but the family is not found. "
+ "Ignoring. StoreDescriptor:" + storeDescriptor);
continue;
}
List<String> storeFiles = storeDescriptor.getStoreFileList();
for (String storeFile : storeFiles) {
StoreFileInfo storeFileInfo = null;
try {
storeFileInfo = fs.getStoreFileInfo(Bytes.toString(family), storeFile);
store.bulkLoadHFile(storeFileInfo);
} catch(FileNotFoundException ex) {
LOG.warn(getRegionInfo().getEncodedName() + " : "
+ ((storeFileInfo != null) ? storeFileInfo.toString() :
(new Path(Bytes.toString(family), storeFile)).toString())
+ " doesn't exist any more. Skip loading the file");
}
}
}
}
if (bulkLoadEvent.getBulkloadSeqNum() > 0) {
getMVCC().advanceMemstoreReadPointIfNeeded(bulkLoadEvent.getBulkloadSeqNum());
}
} finally {
closeBulkRegionOperation();
}
}
/**
* If all stores ended up dropping their snapshots, we can safely drop the prepareFlushResult
*/
private void dropPrepareFlushIfPossible() {
if (writestate.flushing) {
boolean canDrop = true;
if (prepareFlushResult.storeFlushCtxs != null) {
for (Entry<byte[], StoreFlushContext> entry
: prepareFlushResult.storeFlushCtxs.entrySet()) {
Store store = getStore(entry.getKey());
if (store == null) {
continue;
}
if (store.getSnapshotSize() > 0) {
canDrop = false;
break;
}
}
}
      // this means that all the stores in the region have finished flushing, but the WAL marker
// may not have been written or we did not receive it yet.
if (canDrop) {
writestate.flushing = false;
this.prepareFlushResult = null;
}
}
}
@Override
public boolean refreshStoreFiles() throws IOException {
if (ServerRegionReplicaUtil.isDefaultReplica(this.getRegionInfo())) {
return false; // if primary nothing to do
}
if (LOG.isDebugEnabled()) {
LOG.debug(getRegionInfo().getEncodedName() + " : "
+ "Refreshing store files to see whether we can free up memstore");
}
long totalFreedSize = 0;
long smallestSeqIdInStores = Long.MAX_VALUE;
startRegionOperation(); // obtain region close lock
try {
synchronized (writestate) {
for (Store store : getStores()) {
          // TODO: some stores might see new data from flush, while others do not, which
// MIGHT break atomic edits across column families.
long maxSeqIdBefore = store.getMaxSequenceId();
// refresh the store files. This is similar to observing a region open wal marker.
store.refreshStoreFiles();
long storeSeqId = store.getMaxSequenceId();
if (storeSeqId < smallestSeqIdInStores) {
smallestSeqIdInStores = storeSeqId;
}
// see whether we can drop the memstore or the snapshot
if (storeSeqId > maxSeqIdBefore) {
if (writestate.flushing) {
// only drop memstore snapshots if they are smaller than last flush for the store
if (this.prepareFlushResult.flushOpSeqId <= storeSeqId) {
StoreFlushContext ctx = this.prepareFlushResult.storeFlushCtxs == null ?
null : this.prepareFlushResult.storeFlushCtxs.get(store.getFamily().getName());
if (ctx != null) {
long snapshotSize = store.getFlushableSize();
ctx.abort();
this.addAndGetGlobalMemstoreSize(-snapshotSize);
this.prepareFlushResult.storeFlushCtxs.remove(store.getFamily().getName());
totalFreedSize += snapshotSize;
}
}
}
// Drop the memstore contents if they are now smaller than the latest seen flushed file
totalFreedSize += dropMemstoreContentsForSeqId(storeSeqId, store);
}
}
// if all stores ended up dropping their snapshots, we can safely drop the
// prepareFlushResult
dropPrepareFlushIfPossible();
// advance the mvcc read point so that the new flushed files are visible.
// there may be some in-flight transactions, but they won't be made visible since they are
// either greater than flush seq number or they were already picked up via flush.
for (Store s : getStores()) {
getMVCC().advanceMemstoreReadPointIfNeeded(s.getMaxMemstoreTS());
}
        // smallestSeqIdInStores is the seqId that we have a corresponding hfile for. We can
        // safely skip all edits that will be replayed in the future if they have a smaller
        // seqId than this. We are updating lastReplayedOpenRegionSeqId so that we can skip all
        // edits that we have already picked the flush files for
if (this.lastReplayedOpenRegionSeqId < smallestSeqIdInStores) {
this.lastReplayedOpenRegionSeqId = smallestSeqIdInStores;
}
}
// C. Finally notify anyone waiting on memstore to clear:
// e.g. checkResources().
synchronized (this) {
notifyAll(); // FindBugs NN_NAKED_NOTIFY
}
return totalFreedSize > 0;
} finally {
closeRegionOperation();
}
}
private void logRegionFiles() {
if (LOG.isTraceEnabled()) {
LOG.trace(getRegionInfo().getEncodedName() + " : Store files for region: ");
for (Store s : stores.values()) {
for (StoreFile sf : s.getStorefiles()) {
LOG.trace(getRegionInfo().getEncodedName() + " : " + sf);
}
}
}
}
  /** Checks whether the given regionName either equals our region's name, or is the name
   * of the primary region for our corresponding range when this is a secondary replica.
   */
private void checkTargetRegion(byte[] encodedRegionName, String exceptionMsg, Object payload)
throws WrongRegionException {
if (Bytes.equals(this.getRegionInfo().getEncodedNameAsBytes(), encodedRegionName)) {
return;
}
if (!RegionReplicaUtil.isDefaultReplica(this.getRegionInfo()) &&
Bytes.equals(encodedRegionName,
this.fs.getRegionInfoForFS().getEncodedNameAsBytes())) {
return;
}
throw new WrongRegionException(exceptionMsg + payload
+ " targetted for region " + Bytes.toStringBinary(encodedRegionName)
+ " does not match this region: " + this.getRegionInfo());
}
/**
* Used by tests
   * @param s Store to add edit to.
* @param cell Cell to add.
* @return True if we should flush.
*/
protected boolean restoreEdit(final Store s, final Cell cell) {
long kvSize = s.add(cell).getFirst();
if (this.rsAccounting != null) {
rsAccounting.addAndGetRegionReplayEditsSize(getRegionInfo().getRegionName(), kvSize);
}
return isFlushSize(this.addAndGetGlobalMemstoreSize(kvSize));
}
/*
   * @param fs FileSystem the file lives on.
* @param p File to check.
* @return True if file was zero-length (and if so, we'll delete it in here).
* @throws IOException
*/
private static boolean isZeroLengthThenDelete(final FileSystem fs, final Path p)
throws IOException {
FileStatus stat = fs.getFileStatus(p);
if (stat.getLen() > 0) return false;
LOG.warn("File " + p + " is zero-length, deleting.");
fs.delete(p, false);
return true;
}
protected HStore instantiateHStore(final HColumnDescriptor family) throws IOException {
return new HStore(this, family, this.conf);
}
@Override
public Store getStore(final byte[] column) {
return this.stores.get(column);
}
/**
   * Return the HStore instance for the given cell. Does not do any copy: as the number of
   * stores is limited, we iterate over them.
*/
private Store getStore(Cell cell) {
for (Map.Entry<byte[], Store> famStore : stores.entrySet()) {
if (Bytes.equals(
cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),
famStore.getKey(), 0, famStore.getKey().length)) {
return famStore.getValue();
}
}
return null;
}
@Override
public List<Store> getStores() {
List<Store> list = new ArrayList<Store>(stores.size());
list.addAll(stores.values());
return list;
}
@Override
public List<String> getStoreFileList(final byte [][] columns)
throws IllegalArgumentException {
List<String> storeFileNames = new ArrayList<String>();
synchronized(closeLock) {
for(byte[] column : columns) {
Store store = this.stores.get(column);
if (store == null) {
throw new IllegalArgumentException("No column family : " +
new String(column) + " available");
}
for (StoreFile storeFile: store.getStorefiles()) {
storeFileNames.add(storeFile.getPath().toString());
}
logRegionFiles();
}
}
return storeFileNames;
}
//////////////////////////////////////////////////////////////////////////////
// Support code
//////////////////////////////////////////////////////////////////////////////
/** Make sure this is a valid row for the HRegion */
void checkRow(final byte [] row, String op) throws IOException {
if (!rowIsInRange(getRegionInfo(), row)) {
throw new WrongRegionException("Requested row out of range for " +
op + " on HRegion " + this + ", startKey='" +
Bytes.toStringBinary(getRegionInfo().getStartKey()) + "', getEndKey()='" +
Bytes.toStringBinary(getRegionInfo().getEndKey()) + "', row='" +
Bytes.toStringBinary(row) + "'");
}
}
@Override
public RowLock getRowLock(byte[] row, boolean waitForLock) throws IOException {
startRegionOperation();
try {
return getRowLockInternal(row, waitForLock);
} finally {
closeRegionOperation();
}
}
/**
* A version of getRowLock(byte[], boolean) to use when a region operation has already been
* started (the calling thread has already acquired the region-close-guard lock).
*/
protected RowLock getRowLockInternal(byte[] row, boolean waitForLock) throws IOException {
checkRow(row, "row lock");
HashedBytes rowKey = new HashedBytes(row);
RowLockContext rowLockContext = new RowLockContext(rowKey);
// loop until we acquire the row lock (unless !waitForLock)
while (true) {
RowLockContext existingContext = lockedRows.putIfAbsent(rowKey, rowLockContext);
if (existingContext == null) {
// Row is not already locked by any thread, use newly created context.
break;
} else if (existingContext.ownedByCurrentThread()) {
// Row is already locked by current thread, reuse existing context instead.
rowLockContext = existingContext;
break;
} else {
if (!waitForLock) {
return null;
}
TraceScope traceScope = null;
try {
if (Trace.isTracing()) {
traceScope = Trace.startSpan("HRegion.getRowLockInternal");
}
// Row is already locked by some other thread, give up or wait for it
if (!existingContext.latch.await(this.rowLockWaitDuration, TimeUnit.MILLISECONDS)) {
if(traceScope != null) {
traceScope.getSpan().addTimelineAnnotation("Failed to get row lock");
}
throw new IOException("Timed out waiting for lock for row: " + rowKey);
}
if (traceScope != null) traceScope.close();
traceScope = null;
} catch (InterruptedException ie) {
LOG.warn("Thread interrupted waiting for lock on row: " + rowKey);
InterruptedIOException iie = new InterruptedIOException();
iie.initCause(ie);
throw iie;
} finally {
if (traceScope != null) traceScope.close();
}
}
}
// allocate new lock for this thread
return rowLockContext.newLock();
}
/**
* Acquires a lock on the given row.
* The same thread may acquire multiple locks on the same row.
* @return the acquired row lock
* @throws IOException if the lock could not be acquired after waiting
*/
public RowLock getRowLock(byte[] row) throws IOException {
return getRowLock(row, true);
}
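// Illustrative usage sketch (comment only, not part of this class): the typical
// acquire/release pattern for a row lock. "region" and "row" are assumed to be an
// open HRegion and an existing row key.
//
//   RowLock lock = region.getRowLock(row); // may block up to rowLockWaitDuration ms
//   try {
//     // ... row-scoped work; the same thread may re-acquire the same row ...
//   } finally {
//     lock.release();
//   }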
@Override
public void releaseRowLocks(List<RowLock> rowLocks) {
if (rowLocks != null) {
for (RowLock rowLock : rowLocks) {
rowLock.release();
}
rowLocks.clear();
}
}
/**
* Determines whether multiple column families are present.
* Precondition: familyPaths is not null.
*
* @param familyPaths List of {@code Pair<byte[] columnFamily, String hfilePath>}
*/
private static boolean hasMultipleColumnFamilies(Collection<Pair<byte[], String>> familyPaths) {
boolean multipleFamilies = false;
byte[] family = null;
for (Pair<byte[], String> pair : familyPaths) {
byte[] fam = pair.getFirst();
if (family == null) {
family = fam;
} else if (!Bytes.equals(family, fam)) {
multipleFamilies = true;
break;
}
}
return multipleFamilies;
}
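// Worked example for hasMultipleColumnFamilies above:
//   [(cf1, pathA), (cf1, pathB)] -> false (a single family)
//   [(cf1, pathA), (cf2, pathB)] -> true  (two distinct families)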
@Override
public boolean bulkLoadHFiles(Collection<Pair<byte[], String>> familyPaths, boolean assignSeqId,
BulkLoadListener bulkLoadListener) throws IOException {
long seqId = -1;
Map<byte[], List<Path>> storeFiles = new TreeMap<byte[], List<Path>>(Bytes.BYTES_COMPARATOR);
Preconditions.checkNotNull(familyPaths);
// we need writeLock for multi-family bulk load
startBulkRegionOperation(hasMultipleColumnFamilies(familyPaths));
try {
this.writeRequestsCount.increment();
// There possibly was a split that happened between when the split keys
// were gathered and before the HRegion's write lock was taken. We need
// to validate the HFile region before attempting to bulk load all of them
List<IOException> ioes = new ArrayList<IOException>();
List<Pair<byte[], String>> failures = new ArrayList<Pair<byte[], String>>();
for (Pair<byte[], String> p : familyPaths) {
byte[] familyName = p.getFirst();
String path = p.getSecond();
Store store = getStore(familyName);
if (store == null) {
IOException ioe = new org.apache.hadoop.hbase.DoNotRetryIOException(
"No such column family " + Bytes.toStringBinary(familyName));
ioes.add(ioe);
} else {
try {
store.assertBulkLoadHFileOk(new Path(path));
} catch (WrongRegionException wre) {
// recoverable (file doesn't fit in region)
failures.add(p);
} catch (IOException ioe) {
// unrecoverable (hdfs problem)
ioes.add(ioe);
}
}
}
// validation failed because of some sort of IO problem.
if (ioes.size() != 0) {
IOException e = MultipleIOException.createIOException(ioes);
LOG.error("There were one or more IO errors when checking if the bulk load is ok.", e);
throw e;
}
// validation failed, bail out before doing anything permanent.
if (failures.size() != 0) {
StringBuilder list = new StringBuilder();
for (Pair<byte[], String> p : failures) {
list.append("\n").append(Bytes.toString(p.getFirst())).append(" : ")
.append(p.getSecond());
}
// problem when validating
LOG.warn("There was a recoverable bulk load failure likely due to a" +
" split. These (family, HFile) pairs were not loaded: " + list);
return false;
}
// We need to assign a sequential ID that's in between two memstores in order to preserve
// the guarantee that all the edits lower than the highest sequential ID from all the
// HFiles are flushed on disk. See HBASE-10958. The sequence id returned when we flush is
// guaranteed to be one beyond the file made when we flushed (or if nothing to flush, it is
// a sequence id that we can be sure is beyond the last hfile written).
if (assignSeqId) {
FlushResult fs = flushcache(true, false);
if (fs.isFlushSucceeded()) {
seqId = ((FlushResultImpl)fs).flushSequenceId;
} else if (fs.getResult() == FlushResult.Result.CANNOT_FLUSH_MEMSTORE_EMPTY) {
seqId = ((FlushResultImpl)fs).flushSequenceId;
} else {
throw new IOException("Could not bulk load with an assigned sequential ID because the "+
"flush didn't run. Reason for not flushing: " + ((FlushResultImpl)fs).failureReason);
}
}
for (Pair<byte[], String> p : familyPaths) {
byte[] familyName = p.getFirst();
String path = p.getSecond();
Store store = getStore(familyName);
try {
String finalPath = path;
if (bulkLoadListener != null) {
finalPath = bulkLoadListener.prepareBulkLoad(familyName, path);
}
Path commitedStoreFile = store.bulkLoadHFile(finalPath, seqId);
if(storeFiles.containsKey(familyName)) {
storeFiles.get(familyName).add(commitedStoreFile);
} else {
List<Path> storeFileNames = new ArrayList<Path>();
storeFileNames.add(commitedStoreFile);
storeFiles.put(familyName, storeFileNames);
}
if (bulkLoadListener != null) {
bulkLoadListener.doneBulkLoad(familyName, path);
}
} catch (IOException ioe) {
// A failure here can cause an atomicity violation that we currently
// cannot recover from since it is likely a failed HDFS operation.
// TODO Need a better story for reverting partial failures due to HDFS.
LOG.error("There was a partial failure due to IO when attempting to" +
" load " + Bytes.toString(p.getFirst()) + " : " + p.getSecond(), ioe);
if (bulkLoadListener != null) {
try {
bulkLoadListener.failedBulkLoad(familyName, path);
} catch (Exception ex) {
LOG.error("Error while calling failedBulkLoad for family " +
Bytes.toString(familyName) + " with path " + path, ex);
}
}
throw ioe;
}
}
return true;
} finally {
if (wal != null && !storeFiles.isEmpty()) {
// Write a bulk load event covering whichever hfiles were committed, even if the
// load failed partway through.
try {
WALProtos.BulkLoadDescriptor loadDescriptor = ProtobufUtil.toBulkLoadDescriptor(
this.getRegionInfo().getTable(),
ByteStringer.wrap(this.getRegionInfo().getEncodedNameAsBytes()), storeFiles, seqId);
WALUtil.writeBulkLoadMarkerAndSync(wal, this.htableDescriptor, getRegionInfo(),
loadDescriptor, sequenceId);
} catch (IOException ioe) {
if (this.rsServices != null) {
// Have to abort the region server because some hfiles have been loaded but we can't
// write the event into the WAL
this.rsServices.abort("Failed to write bulk load event into WAL.", ioe);
}
}
}
closeBulkRegionOperation();
}
}
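// Illustrative usage sketch (comment only): invoking a bulk load against this region.
// The family name and HFile path are assumptions for the example; production callers
// normally go through the LoadIncrementalHFiles tool rather than this method directly.
//
//   List<Pair<byte[], String>> paths = new ArrayList<Pair<byte[], String>>();
//   paths.add(new Pair<byte[], String>(Bytes.toBytes("cf"), "/staging/cf/hfile1"));
//   boolean loaded = region.bulkLoadHFiles(paths, true, null); // assignSeqId, no listener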
@Override
public boolean equals(Object o) {
return o instanceof HRegion && Bytes.equals(getRegionInfo().getRegionName(),
((HRegion) o).getRegionInfo().getRegionName());
}
@Override
public int hashCode() {
return Bytes.hashCode(getRegionInfo().getRegionName());
}
@Override
public String toString() {
return getRegionInfo().getRegionNameAsString();
}
/**
* RegionScannerImpl is used to combine scanners from multiple Stores (aka column families).
*/
class RegionScannerImpl implements RegionScanner {
// Package local for testability
KeyValueHeap storeHeap = null;
/** Heap of key-values that are not essential for the provided filters and are thus read
* on demand, if on-demand column family loading is enabled.*/
KeyValueHeap joinedHeap = null;
/**
* If the joined heap data gathering is interrupted due to scan limits, this will
* contain the row for which we are populating the values.*/
protected Cell joinedContinuationRow = null;
protected final byte[] stopRow;
private final FilterWrapper filter;
private ScannerContext defaultScannerContext;
protected int isScan; // 0 for scans, -1 for gets (makes the single stop row inclusive)
private boolean filterClosed = false;
private long readPt;
private long maxResultSize;
protected HRegion region;
@Override
public HRegionInfo getRegionInfo() {
return region.getRegionInfo();
}
RegionScannerImpl(Scan scan, List<KeyValueScanner> additionalScanners, HRegion region)
throws IOException {
this.region = region;
this.maxResultSize = scan.getMaxResultSize();
if (scan.hasFilter()) {
this.filter = new FilterWrapper(scan.getFilter());
} else {
this.filter = null;
}
/**
* By default, calls to next/nextRaw must enforce the batch limit. Thus, construct a default
* scanner context that can be used to enforce the batch limit in the event that a
* ScannerContext is not specified during an invocation of next/nextRaw
*/
defaultScannerContext = ScannerContext.newBuilder().setBatchLimit(scan.getBatch()).build();
if (Bytes.equals(scan.getStopRow(), HConstants.EMPTY_END_ROW) && !scan.isGetScan()) {
this.stopRow = null;
} else {
this.stopRow = scan.getStopRow();
}
// If we are doing a get, we want to be [startRow,endRow] normally
// it is [startRow,endRow) and if startRow=endRow we get nothing.
this.isScan = scan.isGetScan() ? -1 : 0;
// synchronize on scannerReadPoints so that nobody calculates
// getSmallestReadPoint before scannerReadPoints is updated.
IsolationLevel isolationLevel = scan.getIsolationLevel();
synchronized(scannerReadPoints) {
this.readPt = getReadpoint(isolationLevel);
scannerReadPoints.put(this, this.readPt);
}
// Here we separate all scanners into two lists - scanners that provide data required
// by the filter to operate (scanners list) and all others (joinedScanners list).
List<KeyValueScanner> scanners = new ArrayList<KeyValueScanner>();
List<KeyValueScanner> joinedScanners = new ArrayList<KeyValueScanner>();
if (additionalScanners != null) {
scanners.addAll(additionalScanners);
}
for (Map.Entry<byte[], NavigableSet<byte[]>> entry :
scan.getFamilyMap().entrySet()) {
Store store = stores.get(entry.getKey());
KeyValueScanner scanner = store.getScanner(scan, entry.getValue(), this.readPt);
if (this.filter == null || !scan.doLoadColumnFamiliesOnDemand()
|| this.filter.isFamilyEssential(entry.getKey())) {
scanners.add(scanner);
} else {
joinedScanners.add(scanner);
}
}
initializeKVHeap(scanners, joinedScanners, region);
}
protected void initializeKVHeap(List<KeyValueScanner> scanners,
List<KeyValueScanner> joinedScanners, HRegion region)
throws IOException {
this.storeHeap = new KeyValueHeap(scanners, region.comparator);
if (!joinedScanners.isEmpty()) {
this.joinedHeap = new KeyValueHeap(joinedScanners, region.comparator);
}
}
@Override
public long getMaxResultSize() {
return maxResultSize;
}
@Override
public long getMvccReadPoint() {
return this.readPt;
}
@Override
public int getBatch() {
return this.defaultScannerContext.getBatchLimit();
}
/**
* Reset the state of the wrapped filter so it can be reused for the next row.
*
* @throws IOException in case a filter raises an I/O exception.
*/
protected void resetFilters() throws IOException {
if (filter != null) {
filter.reset();
}
}
@Override
public boolean next(List<Cell> outResults)
throws IOException {
// apply the batching limit by default
return next(outResults, defaultScannerContext);
}
@Override
public synchronized boolean next(List<Cell> outResults, ScannerContext scannerContext) throws IOException {
if (this.filterClosed) {
throw new UnknownScannerException("Scanner was closed (timed out?) " +
"after we renewed it. Could be caused by a very slow scanner " +
"or a lengthy garbage collection");
}
startRegionOperation(Operation.SCAN);
readRequestsCount.increment();
try {
return nextRaw(outResults, scannerContext);
} finally {
closeRegionOperation(Operation.SCAN);
}
}
@Override
public boolean nextRaw(List<Cell> outResults) throws IOException {
// Use the RegionScanner's context by default
return nextRaw(outResults, defaultScannerContext);
}
@Override
public boolean nextRaw(List<Cell> outResults, ScannerContext scannerContext)
throws IOException {
if (storeHeap == null) {
// scanner is closed
throw new UnknownScannerException("Scanner was closed");
}
boolean moreValues;
if (outResults.isEmpty()) {
// Usually outResults is empty. This is true when next is called
// to handle a scan or get operation.
moreValues = nextInternal(outResults, scannerContext);
} else {
List<Cell> tmpList = new ArrayList<Cell>();
moreValues = nextInternal(tmpList, scannerContext);
outResults.addAll(tmpList);
}
// If the size limit was reached it means a partial Result is being returned. Returning a
// partial Result means that we should not reset the filters; filters should only be reset in
// between rows
if (!scannerContext.partialResultFormed()) resetFilters();
if (isFilterDoneInternal()) {
moreValues = false;
}
return moreValues;
}
/**
* @return true if more cells exist after this batch, false if scanner is done
*/
private boolean populateFromJoinedHeap(List<Cell> results, ScannerContext scannerContext)
throws IOException {
assert joinedContinuationRow != null;
boolean moreValues =
populateResult(results, this.joinedHeap, scannerContext,
joinedContinuationRow.getRowArray(), joinedContinuationRow.getRowOffset(),
joinedContinuationRow.getRowLength());
if (!scannerContext.checkAnyLimitReached(LimitScope.BETWEEN_CELLS)) {
// We are done with this row, reset the continuation.
joinedContinuationRow = null;
}
// As the data is obtained from two independent heaps, we need to
// ensure that result list is sorted, because Result relies on that.
Collections.sort(results, comparator);
return moreValues;
}
/**
* Fetches records with currentRow into results list, until the next row, batchLimit (if not
* -1), or remainingResultSize (if not -1) is reached.
* @param heap KeyValueHeap to fetch data from. It must be positioned on correct row before call.
* @param scannerContext
* @param currentRow Byte array with key we are fetching.
* @param offset offset for currentRow
* @param length length for currentRow
* @return state of last call to {@link KeyValueHeap#next()}
*/
private boolean populateResult(List<Cell> results, KeyValueHeap heap,
ScannerContext scannerContext, byte[] currentRow, int offset, short length)
throws IOException {
Cell nextKv;
boolean moreCellsInRow = false;
boolean tmpKeepProgress = scannerContext.getKeepProgress();
// Scanning between column families and thus the scope is between cells
LimitScope limitScope = LimitScope.BETWEEN_CELLS;
do {
// We want to maintain any progress that is made towards the limits while scanning across
// different column families. To do this, we toggle the keep progress flag on during calls
// to the StoreScanner to ensure that any progress made thus far is not wiped away.
scannerContext.setKeepProgress(true);
heap.next(results, scannerContext);
scannerContext.setKeepProgress(tmpKeepProgress);
nextKv = heap.peek();
moreCellsInRow = moreCellsInRow(nextKv, currentRow, offset, length);
if (scannerContext.checkBatchLimit(limitScope)) {
return scannerContext.setScannerState(NextState.BATCH_LIMIT_REACHED).hasMoreValues();
} else if (scannerContext.checkSizeLimit(limitScope)) {
ScannerContext.NextState state =
moreCellsInRow ? NextState.SIZE_LIMIT_REACHED_MID_ROW : NextState.SIZE_LIMIT_REACHED;
return scannerContext.setScannerState(state).hasMoreValues();
} else if (scannerContext.checkTimeLimit(limitScope)) {
ScannerContext.NextState state =
moreCellsInRow ? NextState.TIME_LIMIT_REACHED_MID_ROW : NextState.TIME_LIMIT_REACHED;
return scannerContext.setScannerState(state).hasMoreValues();
}
} while (moreCellsInRow);
return nextKv != null;
}
/**
* Based on the nextKv in the heap, and the current row, decide whether or not there are more
* cells to be read in the heap. If the row of the nextKv in the heap matches the current row
* then there are more cells to be read in the row.
* @param nextKv
* @param currentRow
* @param offset
* @param length
* @return true When there are more cells in the row to be read
*/
private boolean moreCellsInRow(final Cell nextKv, byte[] currentRow, int offset,
short length) {
return nextKv != null && CellUtil.matchingRow(nextKv, currentRow, offset, length);
}
/**
* @return True if a filter has ruled that the scanner is over, i.e. done.
*/
@Override
public synchronized boolean isFilterDone() throws IOException {
return isFilterDoneInternal();
}
private boolean isFilterDoneInternal() throws IOException {
return this.filter != null && this.filter.filterAllRemaining();
}
private boolean nextInternal(List<Cell> results, ScannerContext scannerContext)
throws IOException {
if (!results.isEmpty()) {
throw new IllegalArgumentException("First parameter should be an empty list");
}
if (scannerContext == null) {
throw new IllegalArgumentException("Scanner context cannot be null");
}
RpcCallContext rpcCall = RpcServer.getCurrentCall();
// Save the initial progress from the Scanner context in these local variables. The progress
// may need to be reset a few times if rows are being filtered out so we save the initial
// progress.
int initialBatchProgress = scannerContext.getBatchProgress();
long initialSizeProgress = scannerContext.getSizeProgress();
long initialTimeProgress = scannerContext.getTimeProgress();
// The loop here is used only when at some point during the next we determine
// that due to effects of filters or otherwise, we have an empty row in the result.
// Then we loop and try again. Otherwise, we must get out on the first iteration via return,
// "true" if there's more data to read, "false" if there isn't (storeHeap is at a stop row,
// and joinedHeap has no more data to read for the last row (if set, joinedContinuationRow).
while (true) {
// Starting to scan a new row. Reset the scanner progress according to whether or not
// progress should be kept.
if (scannerContext.getKeepProgress()) {
// Progress should be kept. Reset to initial values seen at start of method invocation.
scannerContext
.setProgress(initialBatchProgress, initialSizeProgress, initialTimeProgress);
} else {
scannerContext.clearProgress();
}
if (rpcCall != null) {
// If a user specifies a too-restrictive or too-slow scanner, the
// client might time out and disconnect while the server side
// is still processing the request. We should abort aggressively
// in that case.
long afterTime = rpcCall.disconnectSince();
if (afterTime >= 0) {
throw new CallerDisconnectedException(
"Aborting on region " + getRegionInfo().getRegionNameAsString() + ", call " +
this + " after " + afterTime + " ms, since " +
"caller disconnected");
}
}
// Let's see what we have in the storeHeap.
Cell current = this.storeHeap.peek();
byte[] currentRow = null;
int offset = 0;
short length = 0;
if (current != null) {
currentRow = current.getRowArray();
offset = current.getRowOffset();
length = current.getRowLength();
}
boolean stopRow = isStopRow(currentRow, offset, length);
// When hasFilterRow is true it means that all the cells for a particular row must be
// read before a filtering decision can be made. This means that filters for which
// hasFilterRow returns true run the risk of encountering out of memory errors in the
// case that they are applied to a table that has very large rows.
boolean hasFilterRow = this.filter != null && this.filter.hasFilterRow();
// If filter#hasFilterRow is true, partial results are not allowed since allowing them
// would prevent the filters from being evaluated. Thus, if it is true, change the
// scope of any limits that could potentially create partial results to
// LimitScope.BETWEEN_ROWS so that those limits are not reached mid-row
if (hasFilterRow) {
if (LOG.isTraceEnabled()) {
LOG.trace("filter#hasFilterRow is true which prevents partial results from being "
+ " formed. Changing scope of limits that may create partials");
}
scannerContext.setSizeLimitScope(LimitScope.BETWEEN_ROWS);
scannerContext.setTimeLimitScope(LimitScope.BETWEEN_ROWS);
}
// Check if we were getting data from the joinedHeap and hit the limit.
// If not, then it's main path - getting results from storeHeap.
if (joinedContinuationRow == null) {
// First, check if we are at a stop row. If so, there are no more results.
if (stopRow) {
if (hasFilterRow) {
filter.filterRowCells(results);
}
return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
}
// Check if rowkey filter wants to exclude this row. If so, loop to next.
// Technically, if we hit limits before on this row, we don't need this call.
if (filterRowKey(currentRow, offset, length)) {
boolean moreRows = nextRow(currentRow, offset, length);
if (!moreRows) {
return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
}
results.clear();
continue;
}
// Ok, we are good, let's try to get some results from the main heap.
populateResult(results, this.storeHeap, scannerContext, currentRow, offset, length);
if (scannerContext.checkAnyLimitReached(LimitScope.BETWEEN_CELLS)) {
if (hasFilterRow) {
throw new IncompatibleFilterException(
"Filter whose hasFilterRow() returns true is incompatible with scans that must "
+ " stop mid-row because of a limit. ScannerContext:" + scannerContext);
}
return true;
}
Cell nextKv = this.storeHeap.peek();
stopRow = nextKv == null ||
isStopRow(nextKv.getRowArray(), nextKv.getRowOffset(), nextKv.getRowLength());
// save that the row was empty before filters were applied to it.
final boolean isEmptyRow = results.isEmpty();
// We have the part of the row necessary for filtering (all of it, usually).
// First filter with the filterRow(List).
FilterWrapper.FilterRowRetCode ret = FilterWrapper.FilterRowRetCode.NOT_CALLED;
if (hasFilterRow) {
ret = filter.filterRowCellsWithRet(results);
// We don't know how the results have changed after being filtered. Must set progress
// according to contents of results now. However, a change in the results should not
// affect the time progress. Thus preserve whatever time progress has been made
long timeProgress = scannerContext.getTimeProgress();
if (scannerContext.getKeepProgress()) {
scannerContext.setProgress(initialBatchProgress, initialSizeProgress,
initialTimeProgress);
} else {
scannerContext.clearProgress();
}
scannerContext.setTimeProgress(timeProgress);
scannerContext.incrementBatchProgress(results.size());
for (Cell cell : results) {
scannerContext.incrementSizeProgress(CellUtil.estimatedHeapSizeOfWithoutTags(cell));
}
}
if ((isEmptyRow || ret == FilterWrapper.FilterRowRetCode.EXCLUDE) || filterRow()) {
results.clear();
boolean moreRows = nextRow(currentRow, offset, length);
if (!moreRows) {
return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
}
// This row was totally filtered out, if this is NOT the last row,
// we should continue on. Otherwise, nothing else to do.
if (!stopRow) continue;
return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
}
// Ok, we are done with storeHeap for this row.
// Now we may need to fetch additional, non-essential data into row.
// These values are not needed for filter to work, so we postpone their
// fetch to (possibly) reduce amount of data loads from disk.
if (this.joinedHeap != null) {
boolean mayHaveData = joinedHeapMayHaveData(currentRow, offset, length);
if (mayHaveData) {
joinedContinuationRow = current;
populateFromJoinedHeap(results, scannerContext);
if (scannerContext.checkAnyLimitReached(LimitScope.BETWEEN_CELLS)) {
return true;
}
}
}
} else {
// Populating from the joined heap was stopped by limits, populate some more.
populateFromJoinedHeap(results, scannerContext);
if (scannerContext.checkAnyLimitReached(LimitScope.BETWEEN_CELLS)) {
return true;
}
}
// We may have just called populateFromJoinedHeap and hit the limits. If that is
// the case, we need to call it again on the next next() invocation.
if (joinedContinuationRow != null) {
return scannerContext.setScannerState(NextState.MORE_VALUES).hasMoreValues();
}
// Finally, we are done with both joinedHeap and storeHeap.
// Double check to prevent empty rows from appearing in result. It could be
// the case when SingleColumnValueExcludeFilter is used.
if (results.isEmpty()) {
boolean moreRows = nextRow(currentRow, offset, length);
if (!moreRows) {
return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
}
if (!stopRow) continue;
}
// We are done. Return the result.
if (stopRow) {
return scannerContext.setScannerState(NextState.NO_MORE_VALUES).hasMoreValues();
} else {
return scannerContext.setScannerState(NextState.MORE_VALUES).hasMoreValues();
}
}
}
/**
* @param currentRow
* @param offset
* @param length
* @return true when the joined heap may have data for the current row
* @throws IOException
*/
private boolean joinedHeapMayHaveData(byte[] currentRow, int offset, short length)
throws IOException {
Cell nextJoinedKv = joinedHeap.peek();
boolean matchCurrentRow =
nextJoinedKv != null && CellUtil.matchingRow(nextJoinedKv, currentRow, offset, length);
boolean matchAfterSeek = false;
// If the next value in the joined heap does not match the current row, try to seek to the
// correct row
if (!matchCurrentRow) {
Cell firstOnCurrentRow = KeyValueUtil.createFirstOnRow(currentRow, offset, length);
boolean seekSuccessful = this.joinedHeap.requestSeek(firstOnCurrentRow, true, true);
matchAfterSeek =
seekSuccessful && joinedHeap.peek() != null
&& CellUtil.matchingRow(joinedHeap.peek(), currentRow, offset, length);
}
return matchCurrentRow || matchAfterSeek;
}
/**
* This function is to maintain backward compatibility for 0.94 filters. HBASE-6429 combines
* both the filterRow() and filterRow(List<KeyValue> kvs) functions. Code written for 0.94 or
* older may not implement hasFilterRow() as HBASE-6429 expects, because in 0.94
* hasFilterRow() only returns true when filterRow(List<KeyValue> kvs) is overridden, not when
* filterRow() is. In that case filterRow() would otherwise be skipped.
*/
private boolean filterRow() throws IOException {
// when hasFilterRow returns true, filter.filterRow() will be called automatically inside
// filterRowCells(List<Cell> kvs) so we skip that scenario here.
return filter != null && (!filter.hasFilterRow())
&& filter.filterRow();
}
private boolean filterRowKey(byte[] row, int offset, short length) throws IOException {
return filter != null
&& filter.filterRowKey(row, offset, length);
}
protected boolean nextRow(byte [] currentRow, int offset, short length) throws IOException {
assert this.joinedContinuationRow == null: "Trying to go to next row during joinedHeap read.";
Cell next;
while ((next = this.storeHeap.peek()) != null &&
CellUtil.matchingRow(next, currentRow, offset, length)) {
this.storeHeap.next(MOCKED_LIST);
}
resetFilters();
// Calling the hook in CP which allows it to do a fast forward
return this.region.getCoprocessorHost() == null
|| this.region.getCoprocessorHost()
.postScannerFilterRow(this, currentRow, offset, length);
}
protected boolean isStopRow(byte[] currentRow, int offset, short length) {
return currentRow == null ||
(stopRow != null &&
comparator.compareRows(stopRow, 0, stopRow.length,
currentRow, offset, length) <= isScan);
}
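// Worked example for isStopRow above: for a plain scan isScan == 0, so scanning stops
// once stopRow <= currentRow (exclusive stop row). For a get isScan == -1, so the
// comparison becomes stopRow < currentRow, which makes the single requested row inclusive.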
@Override
public synchronized void close() {
if (storeHeap != null) {
storeHeap.close();
storeHeap = null;
}
if (joinedHeap != null) {
joinedHeap.close();
joinedHeap = null;
}
// no need to synchronize here.
scannerReadPoints.remove(this);
this.filterClosed = true;
}
KeyValueHeap getStoreHeapForTesting() {
return storeHeap;
}
@Override
public synchronized boolean reseek(byte[] row) throws IOException {
if (row == null) {
throw new IllegalArgumentException("Row cannot be null.");
}
boolean result = false;
startRegionOperation();
try {
KeyValue kv = KeyValueUtil.createFirstOnRow(row);
// use request seek to make use of the lazy seek option. See HBASE-5520
result = this.storeHeap.requestSeek(kv, true, true);
if (this.joinedHeap != null) {
result = this.joinedHeap.requestSeek(kv, true, true) || result;
}
} finally {
closeRegionOperation();
}
return result;
}
}
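// Illustrative usage sketch (comment only): driving a RegionScannerImpl obtained via
// Region#getScanner(Scan). "region" is assumed to be an open HRegion; each next() call
// fills one row's cells and enforces the default batch limit.
//
//   RegionScanner scanner = region.getScanner(new Scan());
//   try {
//     List<Cell> cells = new ArrayList<Cell>();
//     boolean more;
//     do {
//       cells.clear();
//       more = scanner.next(cells);
//       // ... consume cells ...
//     } while (more);
//   } finally {
//     scanner.close();
//   }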
// Utility methods
/**
* A utility method to create new instances of HRegion based on the
* {@link HConstants#REGION_IMPL} configuration property.
* @param tableDir qualified path of directory where region should be located,
* usually the table directory.
* @param wal The WAL is the outbound log for any updates to the HRegion.
* The wal file is a logfile from the previous execution that's
* custom-computed for this HRegion. The HRegionServer computes and sorts the
* appropriate wal info for this HRegion. If there is a previous file
* (implying that the HRegion has been written-to before), then read it from
* the supplied path.
* @param fs is the filesystem.
* @param conf is global configuration settings.
* @param regionInfo - HRegionInfo that describes the region
* @param htd the table descriptor
* @return the new instance
*/
static HRegion newHRegion(Path tableDir, WAL wal, FileSystem fs,
Configuration conf, HRegionInfo regionInfo, final HTableDescriptor htd,
RegionServerServices rsServices) {
try {
@SuppressWarnings("unchecked")
Class<? extends HRegion> regionClass =
(Class<? extends HRegion>) conf.getClass(HConstants.REGION_IMPL, HRegion.class);
Constructor<? extends HRegion> c =
regionClass.getConstructor(Path.class, WAL.class, FileSystem.class,
Configuration.class, HRegionInfo.class, HTableDescriptor.class,
RegionServerServices.class);
return c.newInstance(tableDir, wal, fs, conf, regionInfo, htd, rsServices);
} catch (Throwable e) {
// todo: what should I throw here?
throw new IllegalStateException("Could not instantiate a region instance.", e);
}
}
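// Illustrative sketch (comment only): newHRegion honors the HConstants.REGION_IMPL
// configuration property, so a subclass with the matching constructor can be swapped in.
// "MyRegion" is a hypothetical subclass, not shipped code.
//
//   conf.setClass(HConstants.REGION_IMPL, MyRegion.class, HRegion.class);
//   // subsequent newHRegion(...) calls reflectively construct MyRegion instances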
/**
* Convenience method creating new HRegions. Used by createTable and by the
* bootstrap code in the HMaster constructor.
* Note, this method creates a {@link WAL} for the created region. It
* needs to be closed explicitly. Use {@link HRegion#getWAL()} to get
* access. <b>When done with a region created using this method, you will
* need to explicitly close the {@link WAL} it created too; it will not be
* done for you. Not closing the wal will leave at least a daemon thread
* running.</b> Call {@link #closeHRegion(HRegion)} and it will do
* necessary cleanup for you.
* @param info Info for region to create.
* @param rootDir Root directory for HBase instance
* @return new HRegion
*
* @throws IOException
*/
public static HRegion createHRegion(final HRegionInfo info, final Path rootDir,
final Configuration conf, final HTableDescriptor hTableDescriptor)
throws IOException {
return createHRegion(info, rootDir, conf, hTableDescriptor, null);
}
/**
* This will do the necessary cleanup a call to
* {@link #createHRegion(HRegionInfo, Path, Configuration, HTableDescriptor)}
* requires. This method will close the region and then close its
* associated {@link WAL} file. You can still use it if you call the other createHRegion,
* the one that takes a {@link WAL} instance but don't be surprised by the
* call to the {@link WAL#close()} on the {@link WAL} the
* HRegion was carrying.
* @throws IOException
*/
public static void closeHRegion(final HRegion r) throws IOException {
if (r == null) return;
r.close();
if (r.getWAL() == null) return;
r.getWAL().close();
}
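// Illustrative sketch (comment only) of the create/close pairing described above;
// "info", "rootDir", "conf" and "htd" are assumed to be valid.
//
//   HRegion region = HRegion.createHRegion(info, rootDir, conf, htd);
//   try {
//     // ... use the region ...
//   } finally {
//     HRegion.closeHRegion(region); // closes the region and the WAL it created
//   }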
/**
* Convenience method creating new HRegions. Used by createTable.
* The {@link WAL} for the created region needs to be closed explicitly.
* Use {@link HRegion#getWAL()} to get access.
*
* @param info Info for region to create.
* @param rootDir Root directory for HBase instance
* @param wal shared WAL
* @param initialize - true to initialize the region
* @return new HRegion
*
* @throws IOException
*/
public static HRegion createHRegion(final HRegionInfo info, final Path rootDir,
final Configuration conf,
final HTableDescriptor hTableDescriptor,
final WAL wal,
final boolean initialize)
throws IOException {
return createHRegion(info, rootDir, conf, hTableDescriptor,
wal, initialize, false);
}
/**
* Convenience method creating new HRegions. Used by createTable.
* The {@link WAL} for the created region needs to be closed
* explicitly, if it is not null.
* Use {@link HRegion#getWAL()} to get access.
*
* @param info Info for region to create.
* @param rootDir Root directory for HBase instance
* @param wal shared WAL
* @param initialize - true to initialize the region
* @param ignoreWAL - true to skip generating a new wal if it is null, mostly for createTable
* @return new HRegion
* @throws IOException
*/
public static HRegion createHRegion(final HRegionInfo info, final Path rootDir,
final Configuration conf,
final HTableDescriptor hTableDescriptor,
final WAL wal,
final boolean initialize, final boolean ignoreWAL)
throws IOException {
Path tableDir = FSUtils.getTableDir(rootDir, info.getTable());
return createHRegion(info, rootDir, tableDir, conf, hTableDescriptor, wal, initialize,
ignoreWAL);
}
/**
* Convenience method creating new HRegions. Used by createTable.
* The {@link WAL} for the created region needs to be closed
* explicitly, if it is not null.
* Use {@link HRegion#getWAL()} to get access.
*
* @param info Info for region to create.
* @param rootDir Root directory for HBase instance
* @param tableDir table directory
* @param wal shared WAL
* @param initialize - true to initialize the region
* @param ignoreWAL - true to skip generating a new wal if it is null, mostly for createTable
* @return new HRegion
* @throws IOException
*/
public static HRegion createHRegion(final HRegionInfo info, final Path rootDir, final Path tableDir,
final Configuration conf,
final HTableDescriptor hTableDescriptor,
final WAL wal,
final boolean initialize, final boolean ignoreWAL)
throws IOException {
LOG.info("creating HRegion " + info.getTable().getNameAsString()
+ " HTD == " + hTableDescriptor + " RootDir = " + rootDir +
" Table name == " + info.getTable().getNameAsString());
FileSystem fs = FileSystem.get(conf);
HRegionFileSystem.createRegionOnFileSystem(conf, fs, tableDir, info);
WAL effectiveWAL = wal;
if (wal == null && !ignoreWAL) {
// TODO HBASE-11983 There'll be no roller for this wal?
// The WAL subsystem will use the default rootDir rather than the passed in rootDir
// unless I pass along via the conf.
Configuration confForWAL = new Configuration(conf);
confForWAL.set(HConstants.HBASE_DIR, rootDir.toString());
effectiveWAL = (new WALFactory(confForWAL,
Collections.<WALActionsListener>singletonList(new MetricsWAL()),
"hregion-" + RandomStringUtils.randomNumeric(8))).
getWAL(info.getEncodedNameAsBytes());
}
HRegion region = HRegion.newHRegion(tableDir,
effectiveWAL, fs, conf, info, hTableDescriptor, null);
if (initialize) {
// If initializing, set the sequenceId. It is also required by WALPerformanceEvaluation when
// verifying the WALEdits.
region.setSequenceId(region.initialize(null));
}
return region;
}
public static HRegion createHRegion(final HRegionInfo info, final Path rootDir,
final Configuration conf,
final HTableDescriptor hTableDescriptor,
final WAL wal)
throws IOException {
return createHRegion(info, rootDir, conf, hTableDescriptor, wal, true);
}
/**
* Open a Region.
* @param info Info for region to be opened.
* @param wal WAL for region to use. This method will call
* WAL#setSequenceNumber(long) passing the result of the call to
* HRegion#getMinSequenceId() to ensure the wal id is properly kept
* up. HRegionStore does this every time it opens a new region.
* @return new HRegion
*
* @throws IOException
*/
public static HRegion openHRegion(final HRegionInfo info,
final HTableDescriptor htd, final WAL wal,
final Configuration conf)
throws IOException {
return openHRegion(info, htd, wal, conf, null, null);
}
/**
* Open a Region.
* @param info Info for region to be opened
* @param htd the table descriptor
* @param wal WAL for region to use. This method will call
* WAL#setSequenceNumber(long) passing the result of the call to
* HRegion#getMinSequenceId() to ensure the wal id is properly kept
* up. HRegionStore does this every time it opens a new region.
* @param conf The Configuration object to use.
* @param rsServices An interface we can request flushes against.
* @param reporter An interface we can report progress against.
* @return new HRegion
*
* @throws IOException
*/
public static HRegion openHRegion(final HRegionInfo info,
final HTableDescriptor htd, final WAL wal, final Configuration conf,
final RegionServerServices rsServices,
final CancelableProgressable reporter)
throws IOException {
return openHRegion(FSUtils.getRootDir(conf), info, htd, wal, conf, rsServices, reporter);
}
/**
* Open a Region.
* @param rootDir Root directory for HBase instance
* @param info Info for region to be opened.
* @param htd the table descriptor
* @param wal WAL for region to use. This method will call
* WAL#setSequenceNumber(long) passing the result of the call to
* HRegion#getMinSequenceId() to ensure the wal id is properly kept
* up. HRegionStore does this every time it opens a new region.
* @param conf The Configuration object to use.
* @return new HRegion
* @throws IOException
*/
public static HRegion openHRegion(Path rootDir, final HRegionInfo info,
final HTableDescriptor htd, final WAL wal, final Configuration conf)
throws IOException {
return openHRegion(rootDir, info, htd, wal, conf, null, null);
}
/**
* Open a Region.
* @param rootDir Root directory for HBase instance
* @param info Info for region to be opened.
* @param htd the table descriptor
* @param wal WAL for region to use. This method will call
* WAL#setSequenceNumber(long) passing the result of the call to
* HRegion#getMinSequenceId() to ensure the wal id is properly kept
* up. HRegionStore does this every time it opens a new region.
* @param conf The Configuration object to use.
* @param rsServices An interface we can request flushes against.
* @param reporter An interface we can report progress against.
* @return new HRegion
* @throws IOException
*/
public static HRegion openHRegion(final Path rootDir, final HRegionInfo info,
final HTableDescriptor htd, final WAL wal, final Configuration conf,
final RegionServerServices rsServices,
final CancelableProgressable reporter)
throws IOException {
FileSystem fs = null;
if (rsServices != null) {
fs = rsServices.getFileSystem();
}
if (fs == null) {
fs = FileSystem.get(conf);
}
return openHRegion(conf, fs, rootDir, info, htd, wal, rsServices, reporter);
}
/**
* Open a Region.
* @param conf The Configuration object to use.
* @param fs Filesystem to use
* @param rootDir Root directory for HBase instance
* @param info Info for region to be opened.
* @param htd the table descriptor
* @param wal WAL for region to use. This method will call
* WAL#setSequenceNumber(long) passing the result of the call to
* HRegion#getMinSequenceId() to ensure the wal id is properly kept
* up. HRegionStore does this every time it opens a new region.
* @return new HRegion
* @throws IOException
*/
public static HRegion openHRegion(final Configuration conf, final FileSystem fs,
final Path rootDir, final HRegionInfo info, final HTableDescriptor htd, final WAL wal)
throws IOException {
return openHRegion(conf, fs, rootDir, info, htd, wal, null, null);
}
/**
* Open a Region.
* @param conf The Configuration object to use.
* @param fs Filesystem to use
* @param rootDir Root directory for HBase instance
* @param info Info for region to be opened.
* @param htd the table descriptor
* @param wal WAL for region to use. This method will call
* WAL#setSequenceNumber(long) passing the result of the call to
* HRegion#getMinSequenceId() to ensure the wal id is properly kept
* up. HRegionStore does this every time it opens a new region.
* @param rsServices An interface we can request flushes against.
* @param reporter An interface we can report progress against.
* @return new HRegion
* @throws IOException
*/
public static HRegion openHRegion(final Configuration conf, final FileSystem fs,
final Path rootDir, final HRegionInfo info, final HTableDescriptor htd, final WAL wal,
final RegionServerServices rsServices, final CancelableProgressable reporter)
throws IOException {
Path tableDir = FSUtils.getTableDir(rootDir, info.getTable());
return openHRegion(conf, fs, rootDir, tableDir, info, htd, wal, rsServices, reporter);
}
/**
* Open a Region.
* @param conf The Configuration object to use.
* @param fs Filesystem to use
* @param rootDir Root directory for HBase instance
* @param info Info for region to be opened.
* @param htd the table descriptor
* @param wal WAL for region to use. This method will call
* WAL#setSequenceNumber(long) passing the result of the call to
* HRegion#getMinSequenceId() to ensure the wal id is properly kept
* up. HRegionStore does this every time it opens a new region.
* @param rsServices An interface we can request flushes against.
* @param reporter An interface we can report progress against.
* @return new HRegion
* @throws IOException
*/
public static HRegion openHRegion(final Configuration conf, final FileSystem fs,
final Path rootDir, final Path tableDir, final HRegionInfo info, final HTableDescriptor htd,
final WAL wal, final RegionServerServices rsServices,
final CancelableProgressable reporter)
throws IOException {
if (info == null) throw new NullPointerException("Passed region info is null");
if (LOG.isDebugEnabled()) {
LOG.debug("Opening region: " + info);
}
HRegion r = HRegion.newHRegion(tableDir, wal, fs, conf, info, htd, rsServices);
return r.openHRegion(reporter);
}
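// Illustrative sketch (comment only): the common open path used by tests and tools;
// "info", "htd", "wal" and "conf" are assumed, with rootDir derived from the conf.
//
//   HRegion region = HRegion.openHRegion(info, htd, wal, conf);
//   try {
//     // ... read/write against the region ...
//   } finally {
//     region.close();
//   }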
/**
* Useful when reopening a closed region (normally for unit tests)
* @param other original object
* @param reporter An interface we can report progress against.
* @return new HRegion
* @throws IOException
*/
public static HRegion openHRegion(final HRegion other, final CancelableProgressable reporter)
throws IOException {
HRegionFileSystem regionFs = other.getRegionFileSystem();
HRegion r = newHRegion(regionFs.getTableDir(), other.getWAL(), regionFs.getFileSystem(),
other.baseConf, other.getRegionInfo(), other.getTableDesc(), null);
return r.openHRegion(reporter);
}
public static Region openHRegion(final Region other, final CancelableProgressable reporter)
throws IOException {
return openHRegion((HRegion)other, reporter);
}
/**
* Open HRegion.
* Calls initialize and sets sequenceId.
* @return Returns <code>this</code>
* @throws IOException
*/
protected HRegion openHRegion(final CancelableProgressable reporter)
throws IOException {
// Refuse to open the region if we are missing local compression support
checkCompressionCodecs();
// Refuse to open the region if encryption configuration is incorrect or
// codec support is missing
checkEncryption();
// Refuse to open the region if a required class cannot be loaded
checkClassLoading();
this.openSeqNum = initialize(reporter);
this.setSequenceId(openSeqNum);
if (wal != null && getRegionServerServices() != null && !writestate.readOnly
&& !isRecovering) {
// Only write the region open event marker to WAL if (1) we are not read-only
// (2) dist log replay is off or we are not recovering. In case region is
// recovering, the open event will be written at setRecovering(false)
writeRegionOpenMarker(wal, openSeqNum);
}
return this;
}
public static void warmupHRegion(final HRegionInfo info,
final HTableDescriptor htd, final WAL wal, final Configuration conf,
final RegionServerServices rsServices,
final CancelableProgressable reporter)
throws IOException {
if (info == null) throw new NullPointerException("Passed region info is null");
if (LOG.isDebugEnabled()) {
LOG.debug("HRegion.Warming up region: " + info);
}
Path rootDir = FSUtils.getRootDir(conf);
Path tableDir = FSUtils.getTableDir(rootDir, info.getTable());
FileSystem fs = null;
if (rsServices != null) {
fs = rsServices.getFileSystem();
}
if (fs == null) {
fs = FileSystem.get(conf);
}
HRegion r = HRegion.newHRegion(tableDir, wal, fs, conf, info, htd, rsServices);
r.initializeWarmup(reporter);
r.close();
}
private void checkCompressionCodecs() throws IOException {
for (HColumnDescriptor fam: this.htableDescriptor.getColumnFamilies()) {
CompressionTest.testCompression(fam.getCompression());
CompressionTest.testCompression(fam.getCompactionCompression());
}
}
private void checkEncryption() throws IOException {
for (HColumnDescriptor fam: this.htableDescriptor.getColumnFamilies()) {
EncryptionTest.testEncryption(conf, fam.getEncryptionType(), fam.getEncryptionKey());
}
}
private void checkClassLoading() throws IOException {
RegionSplitPolicy.getSplitPolicyClass(this.htableDescriptor, conf);
RegionCoprocessorHost.testTableCoprocessorAttrs(conf, this.htableDescriptor);
}
/**
* Create a daughter region from given a temp directory with the region data.
* @param hri Spec. for daughter region to open.
* @throws IOException
*/
HRegion createDaughterRegionFromSplits(final HRegionInfo hri) throws IOException {
// Move the files from the temporary .splits to the final /table/region directory
fs.commitDaughterRegion(hri);
// Create the daughter HRegion instance
HRegion r = HRegion.newHRegion(this.fs.getTableDir(), this.getWAL(), fs.getFileSystem(),
this.getBaseConf(), hri, this.getTableDesc(), rsServices);
r.readRequestsCount.set(this.getReadRequestsCount() / 2);
r.writeRequestsCount.set(this.getWriteRequestsCount() / 2);
return r;
}
/**
* Create a merged region given a temp directory with the region data.
* @param mergedRegionInfo info for the merged region
* @param region_b another merging region
* @return merged HRegion
* @throws IOException
*/
HRegion createMergedRegionFromMerges(final HRegionInfo mergedRegionInfo,
final HRegion region_b) throws IOException {
HRegion r = HRegion.newHRegion(this.fs.getTableDir(), this.getWAL(),
fs.getFileSystem(), this.getBaseConf(), mergedRegionInfo,
this.getTableDesc(), this.rsServices);
r.readRequestsCount.set(this.getReadRequestsCount()
+ region_b.getReadRequestsCount());
r.writeRequestsCount.set(this.getWriteRequestsCount()
+ region_b.getWriteRequestsCount());
this.fs.commitMergedRegion(mergedRegionInfo);
return r;
}
/**
* Inserts a new region's meta information into the passed
* <code>meta</code> region. Used by the HMaster bootstrap code when adding a
* new table to the hbase:meta table.
*
* @param meta hbase:meta HRegion to be updated
* @param r HRegion to add to <code>meta</code>
*
* @throws IOException
*/
// TODO remove since only test and merge use this
public static void addRegionToMETA(final HRegion meta, final HRegion r) throws IOException {
meta.checkResources();
// The row key is the region name
byte[] row = r.getRegionInfo().getRegionName();
final long now = EnvironmentEdgeManager.currentTime();
final List<Cell> cells = new ArrayList<Cell>(2);
cells.add(new KeyValue(row, HConstants.CATALOG_FAMILY,
HConstants.REGIONINFO_QUALIFIER, now,
r.getRegionInfo().toByteArray()));
// Set into the root table the version of the meta table.
cells.add(new KeyValue(row, HConstants.CATALOG_FAMILY,
HConstants.META_VERSION_QUALIFIER, now,
Bytes.toBytes(HConstants.META_VERSION)));
meta.put(row, HConstants.CATALOG_FAMILY, cells);
}
/**
* Computes the Path of the HRegion
*
* @param tabledir qualified path for table
* @param name ENCODED region name
* @return Path of HRegion directory
*/
@Deprecated
public static Path getRegionDir(final Path tabledir, final String name) {
return new Path(tabledir, name);
}
/**
* Computes the Path of the HRegion
*
* @param rootdir qualified path of HBase root directory
* @param info HRegionInfo for the region
* @return qualified path of region directory
*/
@Deprecated
@VisibleForTesting
public static Path getRegionDir(final Path rootdir, final HRegionInfo info) {
return new Path(
FSUtils.getTableDir(rootdir, info.getTable()), info.getEncodedName());
}
/**
* Determines if the specified row is within the row range specified by the
* specified HRegionInfo
*
* @param info HRegionInfo that specifies the row range
* @param row row to be checked
* @return true if the row is within the range specified by the HRegionInfo
*/
public static boolean rowIsInRange(HRegionInfo info, final byte [] row) {
return ((info.getStartKey().length == 0) ||
(Bytes.compareTo(info.getStartKey(), row) <= 0)) &&
((info.getEndKey().length == 0) ||
(Bytes.compareTo(info.getEndKey(), row) > 0));
}
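// Worked example for rowIsInRange above: with startKey="b" and endKey="d" (inclusive
// start, exclusive end), rows "b" and "c" are in range while "a" and "d" are not.
// Empty start/end keys act as unbounded edges of the range.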
/**
* Merge two HRegions. The regions must be adjacent and must not overlap.
*
* @return new merged HRegion
* @throws IOException
*/
public static HRegion mergeAdjacent(final HRegion srcA, final HRegion srcB)
throws IOException {
HRegion a = srcA;
HRegion b = srcB;
// Make sure that srcA comes first; important for key-ordering during
// write of the merged file.
if (srcA.getRegionInfo().getStartKey() == null) {
if (srcB.getRegionInfo().getStartKey() == null) {
throw new IOException("Cannot merge two regions with null start key");
}
// A's start key is null but B's isn't. Assume A comes before B
} else if ((srcB.getRegionInfo().getStartKey() == null) ||
(Bytes.compareTo(srcA.getRegionInfo().getStartKey(),
srcB.getRegionInfo().getStartKey()) > 0)) {
a = srcB;
b = srcA;
}
if (!(Bytes.compareTo(a.getRegionInfo().getEndKey(),
b.getRegionInfo().getStartKey()) == 0)) {
throw new IOException("Cannot merge non-adjacent regions");
}
return merge(a, b);
}
/**
* Merge two regions whether they are adjacent or not.
*
* @param a region a
* @param b region b
* @return new merged region
* @throws IOException
*/
public static HRegion merge(final HRegion a, final HRegion b) throws IOException {
if (!a.getRegionInfo().getTable().equals(b.getRegionInfo().getTable())) {
throw new IOException("Regions do not belong to the same table");
}
FileSystem fs = a.getRegionFileSystem().getFileSystem();
// Make sure each region's cache is empty
a.flush(true);
b.flush(true);
// Compact each region so we only have one store file per family
a.compact(true);
if (LOG.isDebugEnabled()) {
LOG.debug("Files for region: " + a);
a.getRegionFileSystem().logFileSystemState(LOG);
}
b.compact(true);
if (LOG.isDebugEnabled()) {
LOG.debug("Files for region: " + b);
b.getRegionFileSystem().logFileSystemState(LOG);
}
RegionMergeTransactionImpl rmt = new RegionMergeTransactionImpl(a, b, true);
if (!rmt.prepare(null)) {
throw new IOException("Unable to merge regions " + a + " and " + b);
}
HRegionInfo mergedRegionInfo = rmt.getMergedRegionInfo();
LOG.info("starting merge of regions: " + a + " and " + b
+ " into new region " + mergedRegionInfo.getRegionNameAsString()
+ " with start key <"
+ Bytes.toStringBinary(mergedRegionInfo.getStartKey())
+ "> and end key <"
+ Bytes.toStringBinary(mergedRegionInfo.getEndKey()) + ">");
HRegion dstRegion;
try {
dstRegion = (HRegion)rmt.execute(null, null);
} catch (IOException ioe) {
rmt.rollback(null, null);
throw new IOException("Failed merging region " + a + " and " + b
+ ", and successfully rolled back");
}
dstRegion.compact(true);
if (LOG.isDebugEnabled()) {
LOG.debug("Files for new region");
dstRegion.getRegionFileSystem().logFileSystemState(LOG);
}
if (dstRegion.getRegionFileSystem().hasReferences(dstRegion.getTableDesc())) {
throw new IOException("Merged region " + dstRegion
+ " still has references after the compaction, is compaction canceled?");
}
// Archiving the 'A' region
HFileArchiver.archiveRegion(a.getBaseConf(), fs, a.getRegionInfo());
// Archiving the 'B' region
HFileArchiver.archiveRegion(b.getBaseConf(), fs, b.getRegionInfo());
LOG.info("merge completed. New region is " + dstRegion);
return dstRegion;
}
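// Illustrative sketch (comment only): an offline merge of two regions of the same
// table. Both regions are assumed to be closed to online traffic; on success the
// inputs are archived and the returned region holds the combined data.
//
//   HRegion merged = HRegion.mergeAdjacent(regionA, regionB); // verifies adjacency first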
@Override
public Result get(final Get get) throws IOException {
checkRow(get.getRow(), "Get");
// Verify families are all valid
if (get.hasFamilies()) {
for (byte [] family: get.familySet()) {
checkFamily(family);
}
} else { // Adding all families to scanner
for (byte[] family: this.htableDescriptor.getFamiliesKeys()) {
get.addFamily(family);
}
}
List<Cell> results = get(get, true);
boolean stale = this.getRegionInfo().getReplicaId() != 0;
return Result.create(results, get.isCheckExistenceOnly() ? !results.isEmpty() : null, stale);
}
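// Illustrative sketch (comment only): a region-local point read. The row, family and
// qualifier are assumptions for the example; omitting families fetches all of them.
//
//   Get get = new Get(Bytes.toBytes("row1"));
//   get.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"));
//   Result result = region.get(get);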
@Override
public List<Cell> get(Get get, boolean withCoprocessor) throws IOException {
List<Cell> results = new ArrayList<Cell>();
// pre-get CP hook
if (withCoprocessor && (coprocessorHost != null)) {
if (coprocessorHost.preGet(get, results)) {
return results;
}
}
Scan scan = new Scan(get);
RegionScanner scanner = null;
try {
scanner = getScanner(scan);
scanner.next(results);
} finally {
if (scanner != null)
scanner.close();
}
// post-get CP hook
if (withCoprocessor && (coprocessorHost != null)) {
coprocessorHost.postGet(get, results);
}
// do after lock
if (this.metricsRegion != null) {
long totalSize = 0L;
for (Cell cell : results) {
totalSize += CellUtil.estimatedSerializedSizeOf(cell);
}
this.metricsRegion.updateGet(totalSize);
}
return results;
}
public void mutateRow(RowMutations rm) throws IOException {
// Don't need nonces here - RowMutations only supports puts and deletes
mutateRowsWithLocks(rm.getMutations(), Collections.singleton(rm.getRow()));
}
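// Illustrative sketch (comment only): an atomic put+delete on one row via mutateRow.
// The row, family and qualifier names are assumptions for the example.
//
//   byte[] row = Bytes.toBytes("row1");
//   RowMutations rm = new RowMutations(row);
//   Put put = new Put(row);
//   put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("a"), Bytes.toBytes("v"));
//   rm.add(put);
//   Delete del = new Delete(row);
//   del.addColumns(Bytes.toBytes("cf"), Bytes.toBytes("b"));
//   rm.add(del);
//   region.mutateRow(rm); // both mutations applied atomically under the row lock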
/**
* Perform atomic mutations within the region w/o nonces.
* See {@link #mutateRowsWithLocks(Collection, Collection, long, long)}
*/
public void mutateRowsWithLocks(Collection<Mutation> mutations,
Collection<byte[]> rowsToLock) throws IOException {
mutateRowsWithLocks(mutations, rowsToLock, HConstants.NO_NONCE, HConstants.NO_NONCE);
}
/**
* Perform atomic mutations within the region.
* @param mutations The list of mutations to perform.
* <code>mutations</code> can contain operations for multiple rows.
* Caller has to ensure that all rows are contained in this region.
* @param rowsToLock Rows to lock
* @param nonceGroup Optional nonce group of the operation (client Id)
* @param nonce Optional nonce of the operation (unique random id to ensure "more idempotence")
* If multiple rows are locked care should be taken that
* <code>rowsToLock</code> is sorted in order to avoid deadlocks.
* @throws IOException
*/
public void mutateRowsWithLocks(Collection<Mutation> mutations,
Collection<byte[]> rowsToLock, long nonceGroup, long nonce) throws IOException {
MultiRowMutationProcessor proc = new MultiRowMutationProcessor(mutations, rowsToLock);
processRowsWithLocks(proc, -1, nonceGroup, nonce);
}
/**
* @return the current load statistics for the region
*/
public ClientProtos.RegionLoadStats getRegionStats() {
if (!regionStatsEnabled) {
return null;
}
ClientProtos.RegionLoadStats.Builder stats = ClientProtos.RegionLoadStats.newBuilder();
stats.setMemstoreLoad((int) (Math.min(100, (this.memstoreSize.get() * 100) / this
.memstoreFlushSize)));
// Cast after multiplying so a fractional occupancy isn't truncated to zero first.
stats.setHeapOccupancy((int) (rsServices.getHeapMemoryManager().getHeapOccupancyPercent()
* 100));
return stats.build();
}
@Override
public void processRowsWithLocks(RowProcessor<?,?> processor) throws IOException {
processRowsWithLocks(processor, rowProcessorTimeout, HConstants.NO_NONCE,
HConstants.NO_NONCE);
}
@Override
public void processRowsWithLocks(RowProcessor<?,?> processor, long nonceGroup, long nonce)
throws IOException {
processRowsWithLocks(processor, rowProcessorTimeout, nonceGroup, nonce);
}
@Override
public void processRowsWithLocks(RowProcessor<?,?> processor, long timeout,
long nonceGroup, long nonce) throws IOException {
for (byte[] row : processor.getRowsToLock()) {
checkRow(row, "processRowsWithLocks");
}
if (!processor.readOnly()) {
checkReadOnly();
}
checkResources();
startRegionOperation();
WALEdit walEdit = new WALEdit();
// 1. Run pre-process hook
try {
processor.preProcess(this, walEdit);
} catch (IOException e) {
closeRegionOperation();
throw e;
}
// Short circuit the read only case
if (processor.readOnly()) {
try {
long now = EnvironmentEdgeManager.currentTime();
doProcessRowWithTimeout(
processor, now, this, null, null, timeout);
processor.postProcess(this, walEdit, true);
} finally {
closeRegionOperation();
}
return;
}
MultiVersionConsistencyControl.WriteEntry writeEntry = null;
boolean locked;
boolean walSyncSuccessful = false;
List<RowLock> acquiredRowLocks;
long addedSize = 0;
List<Mutation> mutations = new ArrayList<Mutation>();
List<Cell> memstoreCells = new ArrayList<Cell>();
Collection<byte[]> rowsToLock = processor.getRowsToLock();
long mvccNum = 0;
WALKey walKey = null;
try {
// 2. Acquire the row lock(s)
acquiredRowLocks = new ArrayList<RowLock>(rowsToLock.size());
for (byte[] row : rowsToLock) {
// Attempt to lock all involved rows, throw if any lock times out
acquiredRowLocks.add(getRowLock(row));
}
// 3. Region lock
lock(this.updatesLock.readLock(), acquiredRowLocks.size() == 0 ? 1 : acquiredRowLocks.size());
locked = true;
// Get a mvcc write number
mvccNum = MultiVersionConsistencyControl.getPreAssignedWriteNumber(this.sequenceId);
long now = EnvironmentEdgeManager.currentTime();
try {
// 4. Let the processor scan the rows, generate mutations and add
// waledits
doProcessRowWithTimeout(
processor, now, this, mutations, walEdit, timeout);
if (!mutations.isEmpty()) {
// 5. Start mvcc transaction
writeEntry = mvcc.beginMemstoreInsertWithSeqNum(mvccNum);
// 6. Call the preBatchMutate hook
processor.preBatchMutate(this, walEdit);
// 7. Apply to memstore
for (Mutation m : mutations) {
// Handle any tag based cell features
rewriteCellTags(m.getFamilyCellMap(), m);
for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance();) {
Cell cell = cellScanner.current();
CellUtil.setSequenceId(cell, mvccNum);
Store store = getStore(cell);
if (store == null) {
checkFamily(CellUtil.cloneFamily(cell));
// unreachable
}
Pair<Long, Cell> ret = store.add(cell);
addedSize += ret.getFirst();
memstoreCells.add(ret.getSecond());
}
}
long txid = 0;
// 8. Append no sync
if (!walEdit.isEmpty()) {
// we use HLogKey here instead of WALKey directly to support legacy coprocessors.
walKey = new HLogKey(this.getRegionInfo().getEncodedNameAsBytes(),
this.htableDescriptor.getTableName(), WALKey.NO_SEQUENCE_ID, now,
processor.getClusterIds(), nonceGroup, nonce);
txid = this.wal.append(this.htableDescriptor, this.getRegionInfo(),
walKey, walEdit, getSequenceId(), true, memstoreCells);
}
if(walKey == null){
// since we use wal sequence Id as mvcc, for SKIP_WAL changes we need a "faked" WALEdit
// to get a sequence id assigned which is done by FSWALEntry#stampRegionSequenceId
walKey = this.appendEmptyEdit(this.wal, memstoreCells);
}
// 9. Release region lock
if (locked) {
this.updatesLock.readLock().unlock();
locked = false;
}
// 10. Release row lock(s)
releaseRowLocks(acquiredRowLocks);
// 11. Sync edit log
if (txid != 0) {
syncOrDefer(txid, getEffectiveDurability(processor.useDurability()));
}
walSyncSuccessful = true;
// 12. call postBatchMutate hook
processor.postBatchMutate(this);
}
} finally {
if (!mutations.isEmpty() && !walSyncSuccessful) {
LOG.warn("Wal sync failed. Roll back " + mutations.size() +
" memstore keyvalues for row(s):" + StringUtils.byteToHexString(
processor.getRowsToLock().iterator().next()) + "...");
for (Mutation m : mutations) {
for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance();) {
Cell cell = cellScanner.current();
getStore(cell).rollback(cell);
}
}
}
// 13. Roll mvcc forward
if (writeEntry != null) {
mvcc.completeMemstoreInsertWithSeqNum(writeEntry, walKey);
}
if (locked) {
this.updatesLock.readLock().unlock();
}
// release locks if some were acquired but another timed out
releaseRowLocks(acquiredRowLocks);
}
// 14. Run post-process hook
processor.postProcess(this, walEdit, walSyncSuccessful);
} finally {
closeRegionOperation();
if (!mutations.isEmpty() &&
isFlushSize(this.addAndGetGlobalMemstoreSize(addedSize))) {
requestFlush();
}
}
}
private void doProcessRowWithTimeout(final RowProcessor<?,?> processor,
final long now,
final HRegion region,
final List<Mutation> mutations,
final WALEdit walEdit,
final long timeout) throws IOException {
// Short circuit the no time bound case.
if (timeout < 0) {
try {
processor.process(now, region, mutations, walEdit);
} catch (IOException e) {
LOG.warn("RowProcessor:" + processor.getClass().getName() +
" throws Exception on row(s):" +
Bytes.toStringBinary(
processor.getRowsToLock().iterator().next()) + "...", e);
throw e;
}
return;
}
// Case with time bound
FutureTask<Void> task =
new FutureTask<Void>(new Callable<Void>() {
@Override
public Void call() throws IOException {
try {
processor.process(now, region, mutations, walEdit);
return null;
} catch (IOException e) {
LOG.warn("RowProcessor:" + processor.getClass().getName() +
" throws Exception on row(s):" +
Bytes.toStringBinary(
processor.getRowsToLock().iterator().next()) + "...", e);
throw e;
}
}
});
rowProcessorExecutor.execute(task);
try {
task.get(timeout, TimeUnit.MILLISECONDS);
} catch (TimeoutException te) {
LOG.error("RowProcessor timeout:" + timeout + " ms on row(s):" +
Bytes.toStringBinary(processor.getRowsToLock().iterator().next()) +
"...");
throw new IOException(te);
} catch (Exception e) {
throw new IOException(e);
}
}
public Result append(Append append) throws IOException {
return append(append, HConstants.NO_NONCE, HConstants.NO_NONCE);
}
  // TODO: There's a lot of boilerplate code identical to increment.
// We should refactor append and increment as local get-mutate-put
// transactions, so all stores only go through one code path for puts.
@Override
public Result append(Append append, long nonceGroup, long nonce) throws IOException {
byte[] row = append.getRow();
checkRow(row, "append");
boolean flush = false;
Durability durability = getEffectiveDurability(append.getDurability());
boolean writeToWAL = durability != Durability.SKIP_WAL;
WALEdit walEdits = null;
List<Cell> allKVs = new ArrayList<Cell>(append.size());
Map<Store, List<Cell>> tempMemstore = new HashMap<Store, List<Cell>>();
long size = 0;
long txid = 0;
checkReadOnly();
checkResources();
// Lock row
startRegionOperation(Operation.APPEND);
this.writeRequestsCount.increment();
long mvccNum = 0;
WriteEntry w = null;
WALKey walKey = null;
RowLock rowLock = null;
List<Cell> memstoreCells = new ArrayList<Cell>();
boolean doRollBackMemstore = false;
try {
rowLock = getRowLock(row);
try {
lock(this.updatesLock.readLock());
try {
// wait for all prior MVCC transactions to finish - while we hold the row lock
// (so that we are guaranteed to see the latest state)
mvcc.waitForPreviousTransactionsComplete();
if (this.coprocessorHost != null) {
Result r = this.coprocessorHost.preAppendAfterRowLock(append);
if(r!= null) {
return r;
}
}
// now start my own transaction
mvccNum = MultiVersionConsistencyControl.getPreAssignedWriteNumber(this.sequenceId);
w = mvcc.beginMemstoreInsertWithSeqNum(mvccNum);
long now = EnvironmentEdgeManager.currentTime();
// Process each family
for (Map.Entry<byte[], List<Cell>> family : append.getFamilyCellMap().entrySet()) {
Store store = stores.get(family.getKey());
List<Cell> kvs = new ArrayList<Cell>(family.getValue().size());
// Sort the cells so that they match the order that they
// appear in the Get results. Otherwise, we won't be able to
// find the existing values if the cells are not specified
// in order by the client since cells are in an array list.
Collections.sort(family.getValue(), store.getComparator());
// Get previous values for all columns in this family
Get get = new Get(row);
for (Cell cell : family.getValue()) {
get.addColumn(family.getKey(), CellUtil.cloneQualifier(cell));
}
List<Cell> results = get(get, false);
// Iterate the input columns and update existing values if they were
// found, otherwise add new column initialized to the append value
// Avoid as much copying as possible. We may need to rewrite and
// consolidate tags. Bytes are only copied once.
// Would be nice if KeyValue had scatter/gather logic
int idx = 0;
for (Cell cell : family.getValue()) {
Cell newCell;
Cell oldCell = null;
if (idx < results.size()
&& CellUtil.matchingQualifier(results.get(idx), cell)) {
oldCell = results.get(idx);
long ts = Math.max(now, oldCell.getTimestamp());
// Process cell tags
List<Tag> newTags = new ArrayList<Tag>();
// Make a union of the set of tags in the old and new KVs
if (oldCell.getTagsLength() > 0) {
Iterator<Tag> i = CellUtil.tagsIterator(oldCell.getTagsArray(),
oldCell.getTagsOffset(), oldCell.getTagsLength());
while (i.hasNext()) {
newTags.add(i.next());
}
}
if (cell.getTagsLength() > 0) {
Iterator<Tag> i = CellUtil.tagsIterator(cell.getTagsArray(),
cell.getTagsOffset(), cell.getTagsLength());
while (i.hasNext()) {
newTags.add(i.next());
}
}
// Cell TTL handling
if (append.getTTL() != Long.MAX_VALUE) {
// Add the new TTL tag
newTags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(append.getTTL())));
}
// Rebuild tags
byte[] tagBytes = Tag.fromList(newTags);
// allocate an empty cell once
newCell = new KeyValue(row.length, cell.getFamilyLength(),
cell.getQualifierLength(), ts, KeyValue.Type.Put,
oldCell.getValueLength() + cell.getValueLength(),
tagBytes.length);
// copy in row, family, and qualifier
System.arraycopy(cell.getRowArray(), cell.getRowOffset(),
newCell.getRowArray(), newCell.getRowOffset(), cell.getRowLength());
System.arraycopy(cell.getFamilyArray(), cell.getFamilyOffset(),
newCell.getFamilyArray(), newCell.getFamilyOffset(),
cell.getFamilyLength());
System.arraycopy(cell.getQualifierArray(), cell.getQualifierOffset(),
newCell.getQualifierArray(), newCell.getQualifierOffset(),
cell.getQualifierLength());
// copy in the value
System.arraycopy(oldCell.getValueArray(), oldCell.getValueOffset(),
newCell.getValueArray(), newCell.getValueOffset(),
oldCell.getValueLength());
System.arraycopy(cell.getValueArray(), cell.getValueOffset(),
newCell.getValueArray(),
newCell.getValueOffset() + oldCell.getValueLength(),
cell.getValueLength());
// Copy in tag data
System.arraycopy(tagBytes, 0, newCell.getTagsArray(), newCell.getTagsOffset(),
tagBytes.length);
idx++;
} else {
// Append's KeyValue.Type==Put and ts==HConstants.LATEST_TIMESTAMP
CellUtil.updateLatestStamp(cell, now);
// Cell TTL handling
if (append.getTTL() != Long.MAX_VALUE) {
List<Tag> newTags = new ArrayList<Tag>(1);
newTags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(append.getTTL())));
// Add the new TTL tag
newCell = new KeyValue(cell.getRowArray(), cell.getRowOffset(),
cell.getRowLength(),
cell.getFamilyArray(), cell.getFamilyOffset(),
cell.getFamilyLength(),
cell.getQualifierArray(), cell.getQualifierOffset(),
cell.getQualifierLength(),
cell.getTimestamp(), KeyValue.Type.codeToType(cell.getTypeByte()),
cell.getValueArray(), cell.getValueOffset(), cell.getValueLength(),
newTags);
} else {
newCell = cell;
}
}
CellUtil.setSequenceId(newCell, mvccNum);
// Give coprocessors a chance to update the new cell
if (coprocessorHost != null) {
newCell = coprocessorHost.postMutationBeforeWAL(RegionObserver.MutationType.APPEND,
append, oldCell, newCell);
}
kvs.add(newCell);
// Append update to WAL
if (writeToWAL) {
if (walEdits == null) {
walEdits = new WALEdit();
}
walEdits.add(newCell);
}
}
//store the kvs to the temporary memstore before writing WAL
tempMemstore.put(store, kvs);
}
//Actually write to Memstore now
for (Map.Entry<Store, List<Cell>> entry : tempMemstore.entrySet()) {
Store store = entry.getKey();
if (store.getFamily().getMaxVersions() == 1) {
// upsert if VERSIONS for this CF == 1
size += store.upsert(entry.getValue(), getSmallestReadPoint());
memstoreCells.addAll(entry.getValue());
} else {
// otherwise keep older versions around
for (Cell cell: entry.getValue()) {
Pair<Long, Cell> ret = store.add(cell);
size += ret.getFirst();
memstoreCells.add(ret.getSecond());
doRollBackMemstore = true;
}
}
allKVs.addAll(entry.getValue());
}
// Actually write to WAL now
if (writeToWAL) {
// Using default cluster id, as this can only happen in the originating
// cluster. A slave cluster receives the final value (not the delta)
// as a Put.
// we use HLogKey here instead of WALKey directly to support legacy coprocessors.
walKey = new HLogKey(getRegionInfo().getEncodedNameAsBytes(),
this.htableDescriptor.getTableName(), WALKey.NO_SEQUENCE_ID, nonceGroup, nonce);
txid = this.wal.append(this.htableDescriptor, getRegionInfo(), walKey, walEdits,
this.sequenceId, true, memstoreCells);
} else {
recordMutationWithoutWal(append.getFamilyCellMap());
}
if (walKey == null) {
// Append a faked WALEdit in order for SKIP_WAL updates to get mvcc assigned
walKey = this.appendEmptyEdit(this.wal, memstoreCells);
}
size = this.addAndGetGlobalMemstoreSize(size);
flush = isFlushSize(size);
} finally {
this.updatesLock.readLock().unlock();
}
} finally {
rowLock.release();
rowLock = null;
}
// sync the transaction log outside the rowlock
if(txid != 0){
syncOrDefer(txid, durability);
}
doRollBackMemstore = false;
} finally {
if (rowLock != null) {
rowLock.release();
}
// if the wal sync was unsuccessful, remove keys from memstore
if (doRollBackMemstore) {
rollbackMemstore(memstoreCells);
}
if (w != null) {
mvcc.completeMemstoreInsertWithSeqNum(w, walKey);
}
closeRegionOperation(Operation.APPEND);
}
if (this.metricsRegion != null) {
this.metricsRegion.updateAppend();
}
if (flush) {
// Request a cache flush. Do it outside update lock.
requestFlush();
}
return append.isReturnResults() ? Result.create(allKVs) : null;
}
public Result increment(Increment increment) throws IOException {
return increment(increment, HConstants.NO_NONCE, HConstants.NO_NONCE);
}
  // TODO: There's a lot of boilerplate code identical to append.
// We should refactor append and increment as local get-mutate-put
// transactions, so all stores only go through one code path for puts.
@Override
public Result increment(Increment increment, long nonceGroup, long nonce)
throws IOException {
byte [] row = increment.getRow();
checkRow(row, "increment");
TimeRange tr = increment.getTimeRange();
boolean flush = false;
Durability durability = getEffectiveDurability(increment.getDurability());
boolean writeToWAL = durability != Durability.SKIP_WAL;
WALEdit walEdits = null;
List<Cell> allKVs = new ArrayList<Cell>(increment.size());
Map<Store, List<Cell>> tempMemstore = new HashMap<Store, List<Cell>>();
long size = 0;
long txid = 0;
checkReadOnly();
checkResources();
// Lock row
startRegionOperation(Operation.INCREMENT);
this.writeRequestsCount.increment();
RowLock rowLock = null;
WriteEntry w = null;
WALKey walKey = null;
long mvccNum = 0;
List<Cell> memstoreCells = new ArrayList<Cell>();
boolean doRollBackMemstore = false;
try {
rowLock = getRowLock(row);
try {
lock(this.updatesLock.readLock());
try {
// wait for all prior MVCC transactions to finish - while we hold the row lock
// (so that we are guaranteed to see the latest state)
mvcc.waitForPreviousTransactionsComplete();
if (this.coprocessorHost != null) {
Result r = this.coprocessorHost.preIncrementAfterRowLock(increment);
if (r != null) {
return r;
}
}
// now start my own transaction
mvccNum = MultiVersionConsistencyControl.getPreAssignedWriteNumber(this.sequenceId);
w = mvcc.beginMemstoreInsertWithSeqNum(mvccNum);
long now = EnvironmentEdgeManager.currentTime();
// Process each family
for (Map.Entry<byte [], List<Cell>> family:
increment.getFamilyCellMap().entrySet()) {
Store store = stores.get(family.getKey());
List<Cell> kvs = new ArrayList<Cell>(family.getValue().size());
// Sort the cells so that they match the order that they
// appear in the Get results. Otherwise, we won't be able to
// find the existing values if the cells are not specified
// in order by the client since cells are in an array list.
Collections.sort(family.getValue(), store.getComparator());
// Get previous values for all columns in this family
Get get = new Get(row);
for (Cell cell: family.getValue()) {
get.addColumn(family.getKey(), CellUtil.cloneQualifier(cell));
}
get.setTimeRange(tr.getMin(), tr.getMax());
List<Cell> results = get(get, false);
// Iterate the input columns and update existing values if they were
// found, otherwise add new column initialized to the increment amount
int idx = 0;
List<Cell> edits = family.getValue();
for (int i = 0; i < edits.size(); i++) {
Cell cell = edits.get(i);
long amount = Bytes.toLong(CellUtil.cloneValue(cell));
boolean noWriteBack = (amount == 0);
List<Tag> newTags = new ArrayList<Tag>();
// Carry forward any tags that might have been added by a coprocessor
if (cell.getTagsLength() > 0) {
Iterator<Tag> itr = CellUtil.tagsIterator(cell.getTagsArray(),
cell.getTagsOffset(), cell.getTagsLength());
while (itr.hasNext()) {
newTags.add(itr.next());
}
}
Cell c = null;
long ts = now;
if (idx < results.size() && CellUtil.matchingQualifier(results.get(idx), cell)) {
c = results.get(idx);
ts = Math.max(now, c.getTimestamp());
if(c.getValueLength() == Bytes.SIZEOF_LONG) {
amount += Bytes.toLong(c.getValueArray(), c.getValueOffset(), Bytes.SIZEOF_LONG);
} else {
// throw DoNotRetryIOException instead of IllegalArgumentException
throw new org.apache.hadoop.hbase.DoNotRetryIOException(
"Attempted to increment field that isn't 64 bits wide");
}
// Carry tags forward from previous version
if (c.getTagsLength() > 0) {
Iterator<Tag> itr = CellUtil.tagsIterator(c.getTagsArray(),
c.getTagsOffset(), c.getTagsLength());
while (itr.hasNext()) {
newTags.add(itr.next());
}
}
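              // only advance idx to the next Get result when the following edit
              // targets a different qualifier, so repeated increments of the same
              // column in one batch all read the same existing value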
              if (i < (edits.size() - 1) && !CellUtil.matchingQualifier(cell, edits.get(i + 1))) {
                idx++;
              }
}
// Append new incremented KeyValue to list
byte[] q = CellUtil.cloneQualifier(cell);
byte[] val = Bytes.toBytes(amount);
// Add the TTL tag if the mutation carried one
if (increment.getTTL() != Long.MAX_VALUE) {
newTags.add(new Tag(TagType.TTL_TAG_TYPE, Bytes.toBytes(increment.getTTL())));
}
Cell newKV = new KeyValue(row, 0, row.length,
family.getKey(), 0, family.getKey().length,
q, 0, q.length,
ts,
KeyValue.Type.Put,
val, 0, val.length,
newTags);
CellUtil.setSequenceId(newKV, mvccNum);
// Give coprocessors a chance to update the new cell
if (coprocessorHost != null) {
newKV = coprocessorHost.postMutationBeforeWAL(
RegionObserver.MutationType.INCREMENT, increment, c, newKV);
}
allKVs.add(newKV);
if (!noWriteBack) {
kvs.add(newKV);
// Prepare WAL updates
if (writeToWAL) {
if (walEdits == null) {
walEdits = new WALEdit();
}
walEdits.add(newKV);
}
}
}
//store the kvs to the temporary memstore before writing WAL
if (!kvs.isEmpty()) {
tempMemstore.put(store, kvs);
}
}
//Actually write to Memstore now
if (!tempMemstore.isEmpty()) {
for (Map.Entry<Store, List<Cell>> entry : tempMemstore.entrySet()) {
Store store = entry.getKey();
if (store.getFamily().getMaxVersions() == 1) {
// upsert if VERSIONS for this CF == 1
size += store.upsert(entry.getValue(), getSmallestReadPoint());
memstoreCells.addAll(entry.getValue());
} else {
// otherwise keep older versions around
for (Cell cell : entry.getValue()) {
Pair<Long, Cell> ret = store.add(cell);
size += ret.getFirst();
memstoreCells.add(ret.getSecond());
doRollBackMemstore = true;
}
}
}
size = this.addAndGetGlobalMemstoreSize(size);
flush = isFlushSize(size);
}
// Actually write to WAL now
if (walEdits != null && !walEdits.isEmpty()) {
if (writeToWAL) {
// Using default cluster id, as this can only happen in the originating
// cluster. A slave cluster receives the final value (not the delta)
// as a Put.
// we use HLogKey here instead of WALKey directly to support legacy coprocessors.
walKey = new HLogKey(this.getRegionInfo().getEncodedNameAsBytes(),
this.htableDescriptor.getTableName(), WALKey.NO_SEQUENCE_ID, nonceGroup, nonce);
txid = this.wal.append(this.htableDescriptor, this.getRegionInfo(),
walKey, walEdits, getSequenceId(), true, memstoreCells);
} else {
recordMutationWithoutWal(increment.getFamilyCellMap());
}
}
if(walKey == null){
// Append a faked WALEdit in order for SKIP_WAL updates to get mvccNum assigned
walKey = this.appendEmptyEdit(this.wal, memstoreCells);
}
} finally {
this.updatesLock.readLock().unlock();
}
} finally {
rowLock.release();
rowLock = null;
}
// sync the transaction log outside the rowlock
if(txid != 0){
syncOrDefer(txid, durability);
}
doRollBackMemstore = false;
} finally {
if (rowLock != null) {
rowLock.release();
}
// if the wal sync was unsuccessful, remove keys from memstore
if (doRollBackMemstore) {
rollbackMemstore(memstoreCells);
}
if (w != null) {
mvcc.completeMemstoreInsertWithSeqNum(w, walKey);
}
closeRegionOperation(Operation.INCREMENT);
if (this.metricsRegion != null) {
this.metricsRegion.updateIncrement();
}
}
if (flush) {
// Request a cache flush. Do it outside update lock.
requestFlush();
}
return increment.isReturnResults() ? Result.create(allKVs) : null;
}
//
// New HBASE-880 Helpers
//
private void checkFamily(final byte [] family)
throws NoSuchColumnFamilyException {
if (!this.htableDescriptor.hasFamily(family)) {
throw new NoSuchColumnFamilyException("Column family " +
Bytes.toString(family) + " does not exist in region " + this
+ " in table " + this.htableDescriptor);
}
}
public static final long FIXED_OVERHEAD = ClassSize.align(
ClassSize.OBJECT +
ClassSize.ARRAY +
45 * ClassSize.REFERENCE + 2 * Bytes.SIZEOF_INT +
(14 * Bytes.SIZEOF_LONG) +
5 * Bytes.SIZEOF_BOOLEAN);
// woefully out of date - currently missing:
// 1 x HashMap - coprocessorServiceHandlers
// 6 x Counter - numMutationsWithoutWAL, dataInMemoryWithoutWAL,
// checkAndMutateChecksPassed, checkAndMutateChecksFailed, readRequestsCount,
// writeRequestsCount
// 1 x HRegion$WriteState - writestate
// 1 x RegionCoprocessorHost - coprocessorHost
// 1 x RegionSplitPolicy - splitPolicy
// 1 x MetricsRegion - metricsRegion
// 1 x MetricsRegionWrapperImpl - metricsRegionWrapper
public static final long DEEP_OVERHEAD = FIXED_OVERHEAD +
ClassSize.OBJECT + // closeLock
(2 * ClassSize.ATOMIC_BOOLEAN) + // closed, closing
(3 * ClassSize.ATOMIC_LONG) + // memStoreSize, numPutsWithoutWAL, dataInMemoryWithoutWAL
(2 * ClassSize.CONCURRENT_HASHMAP) + // lockedRows, scannerReadPoints
WriteState.HEAP_SIZE + // writestate
ClassSize.CONCURRENT_SKIPLISTMAP + ClassSize.CONCURRENT_SKIPLISTMAP_ENTRY + // stores
(2 * ClassSize.REENTRANT_LOCK) + // lock, updatesLock
MultiVersionConsistencyControl.FIXED_SIZE // mvcc
+ ClassSize.TREEMAP // maxSeqIdInStores
+ 2 * ClassSize.ATOMIC_INTEGER // majorInProgress, minorInProgress
;
@Override
public long heapSize() {
long heapSize = DEEP_OVERHEAD;
for (Store store : this.stores.values()) {
heapSize += store.heapSize();
}
// this does not take into account row locks, recent flushes, mvcc entries, and more
return heapSize;
}
/*
* This method calls System.exit.
* @param message Message to print out. May be null.
*/
private static void printUsageAndExit(final String message) {
if (message != null && message.length() > 0) System.out.println(message);
System.out.println("Usage: HRegion CATALOG_TABLE_DIR [major_compact]");
System.out.println("Options:");
System.out.println(" major_compact Pass this option to major compact " +
"passed region.");
System.out.println("Default outputs scan of passed region.");
System.exit(1);
}
@Override
public boolean registerService(Service instance) {
/*
* No stacking of instances is allowed for a single service name
*/
Descriptors.ServiceDescriptor serviceDesc = instance.getDescriptorForType();
if (coprocessorServiceHandlers.containsKey(serviceDesc.getFullName())) {
LOG.error("Coprocessor service "+serviceDesc.getFullName()+
" already registered, rejecting request from "+instance
);
return false;
}
coprocessorServiceHandlers.put(serviceDesc.getFullName(), instance);
if (LOG.isDebugEnabled()) {
LOG.debug("Registered coprocessor service: region=" +
Bytes.toStringBinary(getRegionInfo().getRegionName()) +
" service=" + serviceDesc.getFullName());
}
return true;
}
@Override
public Message execService(RpcController controller, CoprocessorServiceCall call)
throws IOException {
String serviceName = call.getServiceName();
String methodName = call.getMethodName();
if (!coprocessorServiceHandlers.containsKey(serviceName)) {
throw new UnknownProtocolException(null,
"No registered coprocessor service found for name "+serviceName+
" in region "+Bytes.toStringBinary(getRegionInfo().getRegionName()));
}
Service service = coprocessorServiceHandlers.get(serviceName);
Descriptors.ServiceDescriptor serviceDesc = service.getDescriptorForType();
Descriptors.MethodDescriptor methodDesc = serviceDesc.findMethodByName(methodName);
if (methodDesc == null) {
throw new UnknownProtocolException(service.getClass(),
"Unknown method "+methodName+" called on service "+serviceName+
" in region "+Bytes.toStringBinary(getRegionInfo().getRegionName()));
}
Message request = service.getRequestPrototype(methodDesc).newBuilderForType()
.mergeFrom(call.getRequest()).build();
if (coprocessorHost != null) {
request = coprocessorHost.preEndpointInvocation(service, methodName, request);
}
final Message.Builder responseBuilder =
service.getResponsePrototype(methodDesc).newBuilderForType();
service.callMethod(methodDesc, controller, request, new RpcCallback<Message>() {
@Override
public void run(Message message) {
if (message != null) {
responseBuilder.mergeFrom(message);
}
}
});
if (coprocessorHost != null) {
coprocessorHost.postEndpointInvocation(service, methodName, request, responseBuilder);
}
return responseBuilder.build();
}
/*
* Process table.
* Do major compaction or list content.
* @throws IOException
*/
private static void processTable(final FileSystem fs, final Path p,
final WALFactory walFactory, final Configuration c,
final boolean majorCompact)
throws IOException {
HRegion region;
FSTableDescriptors fst = new FSTableDescriptors(c);
// Currently expects tables have one region only.
if (FSUtils.getTableName(p).equals(TableName.META_TABLE_NAME)) {
final WAL wal = walFactory.getMetaWAL(
HRegionInfo.FIRST_META_REGIONINFO.getEncodedNameAsBytes());
region = HRegion.newHRegion(p, wal, fs, c,
HRegionInfo.FIRST_META_REGIONINFO, fst.get(TableName.META_TABLE_NAME), null);
} else {
throw new IOException("Not a known catalog table: " + p.toString());
}
try {
region.initialize(null);
if (majorCompact) {
region.compact(true);
} else {
// Default behavior
Scan scan = new Scan();
// scan.addFamily(HConstants.CATALOG_FAMILY);
RegionScanner scanner = region.getScanner(scan);
try {
List<Cell> kvs = new ArrayList<Cell>();
boolean done;
do {
kvs.clear();
done = scanner.next(kvs);
if (kvs.size() > 0) LOG.info(kvs);
} while (done);
} finally {
scanner.close();
}
}
} finally {
region.close();
}
}
boolean shouldForceSplit() {
return this.splitRequest;
}
byte[] getExplicitSplitPoint() {
return this.explicitSplitPoint;
}
void forceSplit(byte[] sp) {
// This HRegion will go away after the forced split is successful
// But if a forced split fails, we need to clear forced split.
this.splitRequest = true;
if (sp != null) {
this.explicitSplitPoint = sp;
}
}
void clearSplit() {
this.splitRequest = false;
this.explicitSplitPoint = null;
}
/**
* Give the region a chance to prepare before it is split.
*/
protected void prepareToSplit() {
// nothing
}
  /**
   * Return the split point. A null return indicates the region isn't splittable.
   * If the split point isn't explicitly specified, it will go over the stores
   * to find the best split point. Currently the criterion for the best split
   * point is based on the size of the store.
   */
public byte[] checkSplit() {
// Can't split META
if (this.getRegionInfo().isMetaTable() ||
TableName.NAMESPACE_TABLE_NAME.equals(this.getRegionInfo().getTable())) {
if (shouldForceSplit()) {
LOG.warn("Cannot split meta region in HBase 0.20 and above");
}
return null;
}
// Can't split region which is in recovering state
if (this.isRecovering()) {
LOG.info("Cannot split region " + this.getRegionInfo().getEncodedName() + " in recovery.");
return null;
}
if (!splitPolicy.shouldSplit()) {
return null;
}
byte[] ret = splitPolicy.getSplitPoint();
if (ret != null) {
try {
checkRow(ret, "calculated split");
} catch (IOException e) {
LOG.error("Ignoring invalid split", e);
return null;
}
}
return ret;
}
/**
* @return The priority that this region should have in the compaction queue
*/
public int getCompactPriority() {
int count = Integer.MAX_VALUE;
for (Store store : stores.values()) {
count = Math.min(count, store.getCompactPriority());
}
return count;
}
/** @return the coprocessor host */
public RegionCoprocessorHost getCoprocessorHost() {
return coprocessorHost;
}
/** @param coprocessorHost the new coprocessor host */
public void setCoprocessorHost(final RegionCoprocessorHost coprocessorHost) {
this.coprocessorHost = coprocessorHost;
}
@Override
public void startRegionOperation() throws IOException {
startRegionOperation(Operation.ANY);
}
@Override
public void startRegionOperation(Operation op) throws IOException {
switch (op) {
case GET: // read operations
case SCAN:
checkReadsEnabled();
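        // fall through: read operations must also pass the recovering check below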
case INCREMENT: // write operations
case APPEND:
case SPLIT_REGION:
case MERGE_REGION:
case PUT:
case DELETE:
case BATCH_MUTATE:
case COMPACT_REGION:
// when a region is in recovering state, no read, split or merge is allowed
if (isRecovering() && (this.disallowWritesInRecovering ||
(op != Operation.PUT && op != Operation.DELETE && op != Operation.BATCH_MUTATE))) {
throw new RegionInRecoveryException(getRegionInfo().getRegionNameAsString() +
" is recovering; cannot take reads");
}
break;
default:
break;
}
if (op == Operation.MERGE_REGION || op == Operation.SPLIT_REGION
|| op == Operation.COMPACT_REGION) {
// split, merge or compact region doesn't need to check the closing/closed state or lock the
// region
return;
}
if (this.closing.get()) {
throw new NotServingRegionException(getRegionInfo().getRegionNameAsString() + " is closing");
}
lock(lock.readLock());
if (this.closed.get()) {
lock.readLock().unlock();
throw new NotServingRegionException(getRegionInfo().getRegionNameAsString() + " is closed");
}
try {
if (coprocessorHost != null) {
coprocessorHost.postStartRegionOperation(op);
}
} catch (Exception e) {
lock.readLock().unlock();
throw new IOException(e);
}
}
@Override
public void closeRegionOperation() throws IOException {
closeRegionOperation(Operation.ANY);
}
/**
* Closes the lock. This needs to be called in the finally block corresponding
* to the try block of {@link #startRegionOperation(Operation)}
* @throws IOException
*/
public void closeRegionOperation(Operation operation) throws IOException {
lock.readLock().unlock();
if (coprocessorHost != null) {
coprocessorHost.postCloseRegionOperation(operation);
}
}
/**
* This method needs to be called before any public call that reads or
* modifies stores in bulk. It has to be called just before a try.
   * #closeBulkRegionOperation needs to be called in the try's finally block.
* Acquires a writelock and checks if the region is closing or closed.
* @throws NotServingRegionException when the region is closing or closed
* @throws RegionTooBusyException if failed to get the lock in time
* @throws InterruptedIOException if interrupted while waiting for a lock
*/
private void startBulkRegionOperation(boolean writeLockNeeded)
throws NotServingRegionException, RegionTooBusyException, InterruptedIOException {
if (this.closing.get()) {
throw new NotServingRegionException(getRegionInfo().getRegionNameAsString() + " is closing");
}
if (writeLockNeeded) lock(lock.writeLock());
else lock(lock.readLock());
if (this.closed.get()) {
if (writeLockNeeded) lock.writeLock().unlock();
else lock.readLock().unlock();
throw new NotServingRegionException(getRegionInfo().getRegionNameAsString() + " is closed");
}
}
/**
* Closes the lock. This needs to be called in the finally block corresponding
* to the try block of #startRegionOperation
*/
private void closeBulkRegionOperation(){
if (lock.writeLock().isHeldByCurrentThread()) lock.writeLock().unlock();
else lock.readLock().unlock();
}
/**
   * Update counters for the number of mutations without WAL and the size of possible data loss.
   * This information is exposed by the region server metrics.
*/
private void recordMutationWithoutWal(final Map<byte [], List<Cell>> familyMap) {
numMutationsWithoutWAL.increment();
if (numMutationsWithoutWAL.get() <= 1) {
LOG.info("writing data to region " + this +
" with WAL disabled. Data may be lost in the event of a crash.");
}
long mutationSize = 0;
for (List<Cell> cells: familyMap.values()) {
assert cells instanceof RandomAccess;
int listSize = cells.size();
for (int i=0; i < listSize; i++) {
Cell cell = cells.get(i);
// TODO we need include tags length also here.
mutationSize += KeyValueUtil.keyLength(cell) + cell.getValueLength();
}
}
dataInMemoryWithoutWAL.add(mutationSize);
}
private void lock(final Lock lock)
throws RegionTooBusyException, InterruptedIOException {
lock(lock, 1);
}
/**
* Try to acquire a lock. Throw RegionTooBusyException
* if failed to get the lock in time. Throw InterruptedIOException
* if interrupted while waiting for the lock.
*/
private void lock(final Lock lock, final int multiplier)
throws RegionTooBusyException, InterruptedIOException {
try {
final long waitTime = Math.min(maxBusyWaitDuration,
busyWaitDuration * Math.min(multiplier, maxBusyWaitMultiplier));
if (!lock.tryLock(waitTime, TimeUnit.MILLISECONDS)) {
throw new RegionTooBusyException(
"failed to get a lock in " + waitTime + " ms. " +
"regionName=" + (this.getRegionInfo() == null ? "unknown" :
this.getRegionInfo().getRegionNameAsString()) +
", server=" + (this.getRegionServerServices() == null ? "unknown" :
this.getRegionServerServices().getServerName()));
}
} catch (InterruptedException ie) {
LOG.info("Interrupted while waiting for a lock");
InterruptedIOException iie = new InterruptedIOException();
iie.initCause(ie);
throw iie;
}
}
/**
* Calls sync with the given transaction ID if the region's table is not
* deferring it.
   * @param txid the transaction ID to sync up to
* @throws IOException If anything goes wrong with DFS
*/
private void syncOrDefer(long txid, Durability durability) throws IOException {
if (this.getRegionInfo().isMetaRegion()) {
this.wal.sync(txid);
} else {
switch(durability) {
case USE_DEFAULT:
// do what table defaults to
if (shouldSyncWAL()) {
this.wal.sync(txid);
}
break;
case SKIP_WAL:
          // nothing to do
break;
case ASYNC_WAL:
          // nothing to do
break;
case SYNC_WAL:
case FSYNC_WAL:
// sync the WAL edit (SYNC and FSYNC treated the same for now)
this.wal.sync(txid);
break;
}
}
}
/**
* Check whether we should sync the wal from the table's durability settings
*/
private boolean shouldSyncWAL() {
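    // relies on the Durability enum order (USE_DEFAULT, SKIP_WAL, ASYNC_WAL,
    // SYNC_WAL, FSYNC_WAL): anything stricter than ASYNC_WAL must be synced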
return durability.ordinal() > Durability.ASYNC_WAL.ordinal();
}
/**
* A mocked list implementation - discards all updates.
*/
private static final List<Cell> MOCKED_LIST = new AbstractList<Cell>() {
@Override
public void add(int index, Cell element) {
// do nothing
}
@Override
public boolean addAll(int index, Collection<? extends Cell> c) {
return false; // this list is never changed as a result of an update
}
@Override
public KeyValue get(int index) {
throw new UnsupportedOperationException();
}
@Override
public int size() {
return 0;
}
};
/**
* Facility for dumping and compacting catalog tables.
   * Only handles catalog tables, since these are the only tables whose schema
   * we know for sure. For usage run:
* <pre>
* ./bin/hbase org.apache.hadoop.hbase.regionserver.HRegion
* </pre>
* @throws IOException
*/
public static void main(String[] args) throws IOException {
if (args.length < 1) {
printUsageAndExit(null);
}
boolean majorCompact = false;
if (args.length > 1) {
if (!args[1].toLowerCase().startsWith("major")) {
printUsageAndExit("ERROR: Unrecognized option <" + args[1] + ">");
}
majorCompact = true;
}
final Path tableDir = new Path(args[0]);
final Configuration c = HBaseConfiguration.create();
final FileSystem fs = FileSystem.get(c);
final Path logdir = new Path(c.get("hbase.tmp.dir"));
final String logname = "wal" + FSUtils.getTableName(tableDir) + System.currentTimeMillis();
final Configuration walConf = new Configuration(c);
FSUtils.setRootDir(walConf, logdir);
final WALFactory wals = new WALFactory(walConf, null, logname);
try {
processTable(fs, tableDir, wals, c, majorCompact);
} finally {
wals.close();
// TODO: is this still right?
BlockCache bc = new CacheConfig(c).getBlockCache();
if (bc != null) bc.shutdown();
}
}
@Override
public long getOpenSeqNum() {
return this.openSeqNum;
}
@Override
public Map<byte[], Long> getMaxStoreSeqId() {
return this.maxSeqIdInStores;
}
@Override
public long getOldestSeqIdOfStore(byte[] familyName) {
return wal.getEarliestMemstoreSeqNum(getRegionInfo()
.getEncodedNameAsBytes(), familyName);
}
@Override
public CompactionState getCompactionState() {
boolean hasMajor = majorInProgress.get() > 0, hasMinor = minorInProgress.get() > 0;
return (hasMajor ? (hasMinor ? CompactionState.MAJOR_AND_MINOR : CompactionState.MAJOR)
: (hasMinor ? CompactionState.MINOR : CompactionState.NONE));
}
public void reportCompactionRequestStart(boolean isMajor){
(isMajor ? majorInProgress : minorInProgress).incrementAndGet();
}
public void reportCompactionRequestEnd(boolean isMajor, int numFiles, long filesSizeCompacted) {
int newValue = (isMajor ? majorInProgress : minorInProgress).decrementAndGet();
// metrics
compactionsFinished.incrementAndGet();
compactionNumFilesCompacted.addAndGet(numFiles);
compactionNumBytesCompacted.addAndGet(filesSizeCompacted);
assert newValue >= 0;
}
/**
* Do not change this sequence id. See {@link #sequenceId} comment.
* @return sequenceId
*/
@VisibleForTesting
public AtomicLong getSequenceId() {
return this.sequenceId;
}
/**
* sets this region's sequenceId.
* @param value new value
*/
private void setSequenceId(long value) {
this.sequenceId.set(value);
}
@VisibleForTesting class RowLockContext {
private final HashedBytes row;
private final CountDownLatch latch = new CountDownLatch(1);
private final Thread thread;
private int lockCount = 0;
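    // lockCount makes the row lock reentrant for the owning thread; the row is
    // only unlocked (and the latch released) once every hold has been released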
RowLockContext(HashedBytes row) {
this.row = row;
this.thread = Thread.currentThread();
}
boolean ownedByCurrentThread() {
return thread == Thread.currentThread();
}
RowLock newLock() {
lockCount++;
RowLockImpl rl = new RowLockImpl();
rl.setContext(this);
return rl;
}
void releaseLock() {
if (!ownedByCurrentThread()) {
throw new IllegalArgumentException("Lock held by thread: " + thread
+ " cannot be released by different thread: " + Thread.currentThread());
}
lockCount--;
if (lockCount == 0) {
// no remaining locks by the thread, unlock and allow other threads to access
RowLockContext existingContext = lockedRows.remove(row);
if (existingContext != this) {
throw new RuntimeException(
"Internal row lock state inconsistent, should not happen, row: " + row);
}
latch.countDown();
}
}
}
public static class RowLockImpl implements RowLock {
private RowLockContext context;
private boolean released = false;
@VisibleForTesting
public RowLockContext getContext() {
return context;
}
@VisibleForTesting
public void setContext(RowLockContext context) {
this.context = context;
}
@Override
public void release() {
if (!released) {
context.releaseLock();
}
released = true;
}
}
/**
   * Append a faked WALEdit in order to get a long sequence number; the WAL syncer will just
   * ignore the WALEdit append later.
* @param wal
* @param cells list of Cells inserted into memstore. Those Cells are passed in order to
   *        be updated with the right mvcc values (their wal sequence number)
   * @return the key used for the append, which is done with no sync and an empty edit.
* @throws IOException
*/
private WALKey appendEmptyEdit(final WAL wal, List<Cell> cells) throws IOException {
// we use HLogKey here instead of WALKey directly to support legacy coprocessors.
WALKey key = new HLogKey(getRegionInfo().getEncodedNameAsBytes(), getRegionInfo().getTable(),
WALKey.NO_SEQUENCE_ID, 0, null, HConstants.NO_NONCE, HConstants.NO_NONCE);
    // Call append but with an empty WALEdit. The returned sequence id will not be associated
// with any edit and we can be sure it went in after all outstanding appends.
wal.append(getTableDesc(), getRegionInfo(), key,
WALEdit.EMPTY_WALEDIT, this.sequenceId, false, cells);
return key;
}
/**
* {@inheritDoc}
*/
@Override
public void onConfigurationChange(Configuration conf) {
// Do nothing for now.
}
/**
* {@inheritDoc}
*/
@Override
public void registerChildren(ConfigurationManager manager) {
configurationManager = Optional.of(manager);
for (Store s : this.stores.values()) {
configurationManager.get().registerObserver(s);
}
}
/**
* {@inheritDoc}
*/
@Override
public void deregisterChildren(ConfigurationManager manager) {
for (Store s : this.stores.values()) {
configurationManager.get().deregisterObserver(s);
}
}
/**
* @return split policy for this region.
*/
public RegionSplitPolicy getSplitPolicy() {
return this.splitPolicy;
}
}
| HBASE-13471 Fix a possible infinite loop in doMiniBatchMutation
Summary: in doMiniBatchMutation it is possible to get into an infinite loop when a query has a row that is not in the region. If the batch had an invalid row, the row lock further down the function would fail to acquire because the row was invalid. However, we'd catch the exception and improperly treat it as if we had not acquired the lock, and then try acquiring the lock again. Thus, once we got into this state we'd be stuck in an infinite loop. Worse yet, this infinite loop would occur with the readLock held, so any other operations such as doClose() would be locked out and stuck. The patch is to check whether the row belongs to the region and short-circuit the failure when it doesn't (see the sketch after this message).
Test Plan: IntegrationTestReplication would consistently fail when trying to disable large tables before the fix. After the fix, the tests pass consistently.
Reviewers: eclark
Subscribers: asameet
Differential Revision: https://reviews.facebook.net/D37437
Signed-off-by: Elliott Clark <[email protected]>
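A minimal sketch (not part of the commit) of the guard described in the summary, using the
names that appear in the diff below; it assumes checkRow(...) throws WrongRegionException
for a row that falls outside the region:

    try {
      checkRow(mutation.getRow(), "doMiniBatchMutation");
    } catch (WrongRegionException we) {
      // fail this operation up front instead of retrying the row lock forever
      batchOp.retCodeDetails[lastIndexExclusive] = new OperationStatus(
          OperationStatusCode.SANITY_CHECK_FAILURE, we.getMessage());
      lastIndexExclusive++;
      continue;
    }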
| hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java | HBASE-13471 Fix a possible infinite loop in doMiniBatchMutation | <ide><path>base-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
<ide> } else {
<ide> prepareDelete((Delete) mutation);
<ide> }
<add> checkRow(mutation.getRow(), "doMiniBatchMutation");
<ide> } catch (NoSuchColumnFamilyException nscf) {
<ide> LOG.warn("No such column family in batch mutation", nscf);
<ide> batchOp.retCodeDetails[lastIndexExclusive] = new OperationStatus(
<ide> LOG.warn("Batch Mutation did not pass sanity check", fsce);
<ide> batchOp.retCodeDetails[lastIndexExclusive] = new OperationStatus(
<ide> OperationStatusCode.SANITY_CHECK_FAILURE, fsce.getMessage());
<add> lastIndexExclusive++;
<add> continue;
<add> } catch (WrongRegionException we) {
<add> LOG.warn("Batch mutation had a row that does not belong to this region", we);
<add> batchOp.retCodeDetails[lastIndexExclusive] = new OperationStatus(
<add> OperationStatusCode.SANITY_CHECK_FAILURE, we.getMessage());
<ide> lastIndexExclusive++;
<ide> continue;
<ide> }
<ide> * started (the calling thread has already acquired the region-close-guard lock).
<ide> */
<ide> protected RowLock getRowLockInternal(byte[] row, boolean waitForLock) throws IOException {
<del> checkRow(row, "row lock");
<ide> HashedBytes rowKey = new HashedBytes(row);
<ide> RowLockContext rowLockContext = new RowLockContext(rowKey);
<ide> |
|
Java | apache-2.0 | b9c2b3b25d525d222deb5da8698356d6c09a33b5 | 0 | codeaudit/OG-Platform,jeorme/OG-Platform,codeaudit/OG-Platform,nssales/OG-Platform,jerome79/OG-Platform,ChinaQuants/OG-Platform,DevStreet/FinanceAnalytics,jerome79/OG-Platform,McLeodMoores/starling,nssales/OG-Platform,McLeodMoores/starling,codeaudit/OG-Platform,jerome79/OG-Platform,ChinaQuants/OG-Platform,nssales/OG-Platform,ChinaQuants/OG-Platform,codeaudit/OG-Platform,ChinaQuants/OG-Platform,McLeodMoores/starling,jeorme/OG-Platform,jeorme/OG-Platform,jeorme/OG-Platform,nssales/OG-Platform,DevStreet/FinanceAnalytics,jerome79/OG-Platform,DevStreet/FinanceAnalytics,DevStreet/FinanceAnalytics,McLeodMoores/starling | /**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.financial.model.finitedifference;
import org.testng.annotations.Test;
/**
 * Test ThetaMethodFiniteDifference when theta = 0.5 (i.e. a Crank-Nicolson scheme)
*/
public class ThetaMethodFiniteDifferenceTest {
private static final ConvectionDiffusionPDESolverTestCase TESTER = new ConvectionDiffusionPDESolverTestCase();
private static final ThetaMethodFiniteDifference SOLVER = new ThetaMethodFiniteDifference(0.5);
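  // theta = 0.5 averages the explicit (theta = 0) and implicit (theta = 1) Euler
  // steps, giving the Crank-Nicolson scheme named in the class javadoc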
@Test
public void testBlackScholesEquation() {
int timeSteps = 10;
int priceSteps = 100;
double lowerMoneyness = 0.4;
double upperMoneyness = 3.0;
double volTol = 5e-3;
double priceTol = 5e-2;
double deltaTol = 5e-2;
double gammaTol = 1.0; // Crank-Nicolson gives awful greeks around ATM - this is why it shouldn't be used
boolean print = false; // set to false before pushing
TESTER.testBlackScholesEquationUniformGrid(SOLVER, timeSteps, priceSteps, lowerMoneyness, upperMoneyness, volTol, priceTol, deltaTol, gammaTol, print);
}
@Test
public void testBlackScholesEquationNonuniformGrid() {
int timeSteps = 10;
int priceSteps = 100;
double lowerMoneyness = 0.4;
double upperMoneyness = 3.0;
double volTol = 2e-2; // Crank-Nicolson does not play well with non-uniform grids
double priceTol = 5e-2;
double deltaTol = 5e-1;
double gammaTol = 2e1;// Doesn't even get the sign of gamma right ATM
boolean print = false; // set to false before pushing
TESTER.testBlackScholesEquationNonuniformGrid(SOLVER, timeSteps, priceSteps, lowerMoneyness, upperMoneyness, volTol, priceTol, deltaTol, gammaTol, print);
}
@Test
public void testLogBlackScholesEquation() {
int timeSteps = 10;
int priceSteps = 100;
double lowerMoneyness = 0.3;
double upperMoneyness = 4.0;
double volTol = 5e-3;
double priceTol = 5e-2;
double deltaTol = 5e-2;
double gammaTol = 5.0; // Crank-Nicolson gives awful greeks around ATM - this is why it shouldn't be used
boolean print = false; // set to false before pushing
TESTER.testLogTransformedBlackScholesEquation(SOLVER, timeSteps, priceSteps, lowerMoneyness, upperMoneyness, volTol, priceTol, deltaTol, gammaTol, print);
}
@Test
public void testCEV() {
int timeSteps = 10;
int priceSteps = 100;
double lowerMoneyness = 0.3;
double upperMoneyness = 3.0;
double volTol = 5e-3;
boolean print = false; // set to false before pushing
TESTER.testCEV(SOLVER, timeSteps, priceSteps, lowerMoneyness, upperMoneyness, volTol, print);
}
@Test
public void testAmericanPrice() {
int timeSteps = 10;
int priceSteps = 100;
double lowerMoneyness = 0.4;
double upperMoneyness = 3.0;
double priceTol = 1e-3;
boolean print = false; // set to false before pushing
TESTER.testAmericanPrice(SOLVER, timeSteps, priceSteps, lowerMoneyness, upperMoneyness, priceTol, print);
}
}
| projects/OG-Analytics/tests/unit/com/opengamma/financial/model/finitedifference/ThetaMethodFiniteDifferenceTest.java | /**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.financial.model.finitedifference;
import org.testng.annotations.Test;
/**
* Test ThetaMethodFiniteDifference when theta = 0.5 (i.e. A Crank-Nicolson scheme)
*/
public class ThetaMethodFiniteDifferenceTest {
private static final ConvectionDiffusionPDESolverTestCase TESTER = new ConvectionDiffusionPDESolverTestCase();
private static final ThetaMethodFiniteDifference SOLVER = new ThetaMethodFiniteDifference(1.0);
@Test
public void testBlackScholesEquation() {
int timeSteps = 10;
int priceSteps = 100;
double lowerMoneyness = 0.4;
double upperMoneyness = 3.0;
double volTol = 5e-3;
double priceTol = 5e-2;
double deltaTol = 5e-2;
double gammaTol = 1.0; // Crank-Nicolson gives awful greeks around ATM - this is why it shouldn't be used
boolean print = true; // set to false before pushing
TESTER.testBlackScholesEquationUniformGrid(SOLVER, timeSteps, priceSteps, lowerMoneyness, upperMoneyness, volTol, priceTol, deltaTol, gammaTol, print);
}
@Test
public void testBlackScholesEquationNonuniformGrid() {
int timeSteps = 10;
int priceSteps = 100;
double lowerMoneyness = 0.4;
double upperMoneyness = 3.0;
double volTol = 2e-2; // Crank-Nicolson does not play well with non-uniform grids
double priceTol = 5e-2;
double deltaTol = 5e-1;
double gammaTol = 2e1;// Doesn't even get the sign of gamma right ATM
boolean print = false; // set to false before pushing
TESTER.testBlackScholesEquationNonuniformGrid(SOLVER, timeSteps, priceSteps, lowerMoneyness, upperMoneyness, volTol, priceTol, deltaTol, gammaTol, print);
}
@Test
public void testLogBlackScholesEquation() {
int timeSteps = 10;
int priceSteps = 100;
double lowerMoneyness = 0.3;
double upperMoneyness = 4.0;
double volTol = 5e-3;
double priceTol = 5e-2;
double deltaTol = 5e-2;
double gammaTol = 5.0; // Crank-Nicolson gives awful greeks around ATM - this is why it shouldn't be used
boolean print = false; // set to false before pushing
TESTER.testLogTransformedBlackScholesEquation(SOLVER, timeSteps, priceSteps, lowerMoneyness, upperMoneyness, volTol, priceTol, deltaTol, gammaTol, print);
}
@Test
public void testCEV() {
int timeSteps = 10;
int priceSteps = 100;
double lowerMoneyness = 0.3;
double upperMoneyness = 3.0;
double volTol = 5e-3;
boolean print = false; // set to false before pushing
TESTER.testCEV(SOLVER, timeSteps, priceSteps, lowerMoneyness, upperMoneyness, volTol, print);
}
@Test
public void testAmericanPrice() {
int timeSteps = 10;
int priceSteps = 100;
double lowerMoneyness = 0.4;
double upperMoneyness = 3.0;
double priceTol = 1e-3;
boolean print = false; // set to false before pushing
TESTER.testAmericanPrice(SOLVER, timeSteps, priceSteps, lowerMoneyness, upperMoneyness, priceTol, print);
}
}
| adding missing tests
| projects/OG-Analytics/tests/unit/com/opengamma/financial/model/finitedifference/ThetaMethodFiniteDifferenceTest.java | adding missing tests | <ide><path>rojects/OG-Analytics/tests/unit/com/opengamma/financial/model/finitedifference/ThetaMethodFiniteDifferenceTest.java
<ide> public class ThetaMethodFiniteDifferenceTest {
<ide>
<ide> private static final ConvectionDiffusionPDESolverTestCase TESTER = new ConvectionDiffusionPDESolverTestCase();
<del> private static final ThetaMethodFiniteDifference SOLVER = new ThetaMethodFiniteDifference(1.0);
<add> private static final ThetaMethodFiniteDifference SOLVER = new ThetaMethodFiniteDifference(0.5);
<ide>
<ide> @Test
<ide> public void testBlackScholesEquation() {
<ide> double priceTol = 5e-2;
<ide> double deltaTol = 5e-2;
<ide> double gammaTol = 1.0; // Crank-Nicolson gives awful greeks around ATM - this is why it shouldn't be used
<del> boolean print = true; // set to false before pushing
<add> boolean print = false; // set to false before pushing
<ide>
<ide> TESTER.testBlackScholesEquationUniformGrid(SOLVER, timeSteps, priceSteps, lowerMoneyness, upperMoneyness, volTol, priceTol, deltaTol, gammaTol, print);
<ide> } |
|
JavaScript | mit | 599775959e85ae94f50b97001c6e3cf54cad89cb | 0 | kidaa/Pleiades,XtrKiL/Pleiades,XtrKiL/Pleiades,kidaa/Pleiades,kidaa/Pleiades,kidaa/Pleiades,XtrKiL/Pleiades,XtrKiL/Pleiades | // Place all the behaviors and hooks related to the matching controller here.
// All this logic will automatically be available in application.js.
$(document).ready(function() {
$("#mobile").hide();
if( /Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent) ) {
// You are in mobile browser
$("#notinmobile").hide();
$("#mobile").show();
introTrue = false;
}
    //IntroJs walkthrough shown when the QuikMap creation page is first opened
var createguide = introJs();
createguide.setOptions({
steps: [
{
intro: 'This is the QuikMap creation page'
},
{
element:"#nameMap",
intro: "Name your map after its destination. \
This is so that other people can search for it easily"
},
{
element:"#canvas",
intro:"The map canvas"
},
{
element:"#toolbar",
intro:"The toolbar, with all the tools you need to \
create your own QuikMap. Click on them to know what they do"
},
{
intro:"Now some tips on how to draw a QuikMap"
},
{
element:"#drawbutton",
intro:"<strong>Firstly</strong>, draw the routes/paths \
from your starting point to your endpoint"
},
{
element:"#landmarkbutton",
intro:"<strong>Secondly</strong>, add in the landmarks that\
you can see as you travel along your route. Usually\
<ul>\
<li><b>3</b> per junction</li>\
<li><b>2</b> per lane</li>\
</ul>"
},
{
element:"#submitbutton",
intro:"<strong>Lastly</strong>, remember to submit your QuikMap, so that you can share it!"
}
],
doneLabel: 'Done',
tooltipPosition: 'auto',
positionPrecedence: ['left', 'right', 'bottom', 'top'],
disableInteraction: false
});
if(introTrue){
createguide.start();
}
$('#howtousebutton').click(function(){
createguide.goToStep(6).start();
});
//bootstrapSwitch
$('#isStraight').bootstrapSwitch('state',false);
$('#isStraight').bootstrapSwitch("onText",'Straight');
$('#isStraight').bootstrapSwitch("offText",'Curvy');
$('#isStraight').bootstrapSwitch("onColor",'primary');
$('#isStraight').bootstrapSwitch("offColor",'info');
$('#isLandmark').bootstrapSwitch('state',false);
$('#isLandmark').bootstrapSwitch("onText",'Landmark');
$('#isLandmark').bootstrapSwitch("offText",'Points');
$('#isLandmark').bootstrapSwitch("onColor",'primary');
$('#isLandmark').bootstrapSwitch("offColor",'primary');
//map_canvas properties
var golden_ratio = 1.61803398875;
var map_canvas = $('#canvas');
    var ctx = map_canvas[0].getContext("2d");
var container = $("#app"); //container of map_canvas
    var cwidth = container.width();
    var cheight = cwidth / golden_ratio;
map_canvas.attr("width",cwidth);
map_canvas.attr("height",cheight);
//canvas resizing on window resize
$(window).resize(function(){
cwidth = container.width();
cheight = cwidth/golden_ratio;
map_canvas.attr("width",cwidth);
map_canvas.attr("height",cheight);
update_canvas(map_data);
});
//map_data properties
var map_data = {name:"Untitled", maxid: 0, lines:[], landmarks: []};
var undo_stack = new Array();
var redo_stack = new Array();
var tochangeindex = null;
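    // map_data holds everything drawn so far: lines and landmarks store their
    // coordinates normalised against the canvas size (see createLandmark below),
    // so the map can be redrawn at any scale; maxid hands out unique element ids.
    // Undo/redo use the classic two-stack pattern: each new action is pushed onto
    // undo_stack and clears redo_stack, while undoing pops an action, reverts it,
    // and pushes its inverse onto redo_stack. tochangeindex remembers which
    // landmark the select tool is currently editing.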
/**
*Toolbar controller.
*controls all buttons in toolbar
*/
var selected = 0;
$('#selectdiv').hide();
$('#drawdiv').hide();
$('#landmarkdiv').hide();
$('#removediv').hide();
$('#undobutton').attr("disabled", true);
$('#redobutton').attr("disabled", true);
$('#selectbutton').click( function() {
selected = 1;
});
$('#drawbutton').click( function() {
selected = 2;
});
$('#landmarkbutton').click( function() {
selected = 3;
});
$('#removebutton').click( function() {
selected = 4;
});
$('#undobutton').click( function() {
selected = 0;
var actiontoundo = undo_stack.pop();
if (actiontoundo.action == "line") {
var line_data;
var to_remove = 0;
            for (var i = 0; i < map_data.lines.length; i++) {
if (map_data.lines[i].id == actiontoundo.id) {
line_data = map_data.lines[i];
to_remove = i;
break;
}
}
map_data.lines.splice(to_remove, 1);
redo_stack.push({action: "line", data: line_data});
}
if (actiontoundo.action == "landmark") {
var landmarkdata;
var to_remove = 0;
            for (var i = 0; i < map_data.landmarks.length; i++) {
if (map_data.landmarks[i].id == actiontoundo.id) {
landmarkdata = map_data.landmarks[i];
to_remove = i;
break;
}
}
map_data.landmarks.splice(to_remove, 1);
redo_stack.push({action: "landmark", data: landmarkdata});
}
if (actiontoundo.action == "delete") {
var datatoaddback = actiontoundo.data;
if (datatoaddback.type == "line") {
map_data.lines.push(datatoaddback);
}
else if (datatoaddback.type == "landmark") {
map_data.landmarks.push(datatoaddback);
}
redo_stack.push({action:"delete",data: datatoaddback});
}
if (actiontoundo.action == "changelandmark") {
            for (var i = 0; i < map_data.landmarks.length; i++) {
if (map_data.landmarks[i].id == actiontoundo.id) {
var newlandmarkname = map_data.landmarks[i].landmarkname;
var newlandmarkimg = map_data.landmarks[i].img;
map_data.landmarks[i].landmarkname = actiontoundo.oldname;
map_data.landmarks[i].img = actiontoundo.oldimg;
redo_stack.push({action: "changelandmark", id: actiontoundo.id, newname: newlandmarkname, newimg: newlandmarkimg});
break;
}
}
}
update_canvas(map_data);
});
$('#redobutton').click( function() {
selected = 0;
var actiontoredo = redo_stack.pop();
if (actiontoredo.action == "line") {
var thisid = actiontoredo.data.id;
undo_stack.push({action:"line", id:thisid});
map_data.lines.push(actiontoredo.data);
}
if (actiontoredo.action == "landmark") {
var thisid = actiontoredo.data.id;
undo_stack.push({action:"landmark", id:thisid});
map_data.landmarks.push(actiontoredo.data);
}
if (actiontoredo.action == "delete") {
todelete = actiontoredo.data;
undo_stack.push({action:"delete",data:todelete});
if (todelete.type == "line") {
map_data.lines.splice(map_data.lines.indexOf(todelete),1);
}
if (todelete.type == "landmark") {
map_data.landmarks.splice(map_data.landmarks.indexOf(todelete),1);
}
}
if (actiontoredo.action == "changelandmark") {
            for (var i = 0; i < map_data.landmarks.length; i++) {
if (map_data.landmarks[i].id == actiontoredo.id) {
var oldname = map_data.landmarks[i].landmarkname;
var oldimg = map_data.landmarks[i].img;
map_data.landmarks[i].landmarkname = actiontoredo.newname;
map_data.landmarks[i].img = actiontoredo.newimg;
undo_stack.push({action: "changelandmark", id: actiontoredo.id, oldname: oldname, oldimg: oldimg});
break;
}
}
}
update_canvas(map_data);
});
$('#toolbar').click( function() {
$('#selectbutton').attr("disabled", false);
$('#drawbutton').attr("disabled", false);
$('#landmarkbutton').attr("disabled", false);
$('#removebutton').attr("disabled", false);
$('#selectdiv').hide();
$('#drawdiv').hide();
$('#landmarkdiv').hide();
$('#removediv').hide();
switch (selected) {
case 1:
$('#selectbutton').attr("disabled", true);
$('#selectdiv').show();
break;
case 2:
$('#drawbutton').attr("disabled", true);
$('#drawdiv').show();
break;
case 3:
$('#landmarkbutton').attr("disabled", true);
$('#landmarkdiv').show();
break;
case 4:
$('#removebutton').attr("disabled", true);
$('#removediv').show();
break;
}
});
/**
* Toolbar button code ends here
*/
/**
* Landmark popup code
*/
$(document).click( function() {
if (selected != 3 && selected != 1) {
$("#pointpopover").hide();
$("#landmarkpopover").hide();
} else if (selected == 3){
if (!$("#isLandmark").bootstrapSwitch("state")) {
$("#landmarkpopover").hide();
} else {
$("#pointpopover").hide();
}
}
});
function createLandmark(img, name) {
if (selected == 1) {
// edit landmark
var oldname = map_data.landmarks[tochangeindex].landmarkname;
var oldimg = map_data.landmarks[tochangeindex].img;
map_data.landmarks[tochangeindex].landmarkname = name;
map_data.landmarks[tochangeindex].img = img;
undo_stack.push({action: "changelandmark", id: map_data.landmarks[tochangeindex].id, oldname: oldname, oldimg: oldimg});
redo_stack.splice(0, redo_stack.length);
update_canvas(map_data);
} else { // new landmark
var thisid = map_data.maxid;
map_data.maxid += 1;
undo_stack.push({action: "landmark", id: thisid});
redo_stack.splice(0, redo_stack.length);
landmarkpos = {x: landmarkpos.x / cwidth*1.0, y: landmarkpos.y / cheight*1.0};
addElement({type: "landmark", id: thisid, landmarkname: name, img: img, pos: landmarkpos});
}
}
$("#startbutton").click( function() {
name = $("#pointinput").val();
if (name == "") {
name = "Start";
}
createLandmark("startimg", name);
$("#pointinput").val("");
$("#pointpopover").hide();
});
$("#endbutton").click( function() {
name = $("#pointinput").val();
if (name == "") {
name = "End";
}
createLandmark("endimg", name);
$("#pointinput").val("");
$("#pointpopover").hide();
});
$("#trainbutton").click( function() {
name = $("#pointinput").val();
if (name == "") {
name = "Station";
}
createLandmark("trainimg", name);
$("#pointinput").val("");
$("#pointpopover").hide();
});
$("#busbutton").click( function() {
name = $("#pointinput").val();
if (name == "") {
name = "Bus Stop";
}
createLandmark("busimg", name);
$("#pointinput").val("");
$("#pointpopover").hide();
});
$('#digitalbutton').click( function() {
name = $("#landmarkinput").val();
if (name == "") {
name = "Digital";
}
createLandmark("digitalimg", name);
$("#landmarkinput").val("");
$("#landmarkpopover").hide();
});
$('#lifestylebutton').click( function() {
name = $("#landmarkinput").val();
if (name == "") {
name = "Lifestyle";
}
createLandmark("lifestyleimg", name);
$("#landmarkinput").val("");
$("#landmarkpopover").hide();
});
$('#foodbutton').click( function() {
name = $("#landmarkinput").val();
if (name == "") {
name = "F&B";
}
createLandmark("foodimg", name);
$("#landmarkinput").val("");
$("#landmarkpopover").hide();
});
$('#fashionbutton').click( function() {
name = $("#landmarkinput").val();
if (name == "") {
name = "Fashion";
}
createLandmark("fashionimg", name);
$("#landmarkinput").val("");
$("#landmarkpopover").hide();
});
$('#servicesbutton').click( function() {
name = $("#landmarkinput").val();
if (name == "") {
name = "Service";
}
createLandmark("servicesimg", name);
$("#landmarkinput").val("");
$("#landmarkpopover").hide();
});
$('#structuresbutton').click( function() {
name = $("#landmarkinput").val();
if (name == "") {
name = "Structure";
}
createLandmark("structuresimg", name);
$("#landmarkinput").val("");
$("#landmarkpopover").hide();
});
/**
* Landmark popup code ends here
*/
// Canvas Manipulation
var penDown = false;
var startpos;
var points = new Array();
function getMousePos(e) {
return {
x: e.offsetX,
y: e.offsetY
};
}
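    // Derive cubic Bezier control points from a freehand stroke: take the
    // stroke's quarter and three-quarter sample points and push them outward
    // from the endpoints along the local slope. Vertical segments get a huge
    // finite gradient so the slope calculation cannot divide by zero.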
function get_control_points(start,q1,q2,end){
//Magic number, with golden ratio!
extend_coefficient = 0.5*golden_ratio;
function form_equation(q1,q2){
function equation(x){
var REALLYFUCKINGHIGHVALUE = 9999999999999;
if(q1.x == q2.x) {
gradient = REALLYFUCKINGHIGHVALUE;
} else {
gradient = (q2.y - q1.y)/(q2.x - q1.x);
}
c = q1.y - gradient*q1.x;
return gradient*x + c;
}
return equation;
}
var control1 = {x:0,y:0};
var control2 = {x:0,y:0};
control1.x = Math.floor(q1.x + extend_coefficient*(q1.x-start.x));
control2.x = Math.floor(q2.x - extend_coefficient*(end.x-q2.x));
control1.y = Math.floor(form_equation(start,q1)(control1.x));
control2.y = Math.floor(form_equation(q2,end)(control2.x));
return [control1,control2];
}
map_canvas.on('mousedown',function(e) {
if (penDown === false && selected == 2) {
penDown = true;
var pos = getMousePos(e);
startpos = pos;
ctx.beginPath();
ctx.moveTo(pos.x,pos.y);
}
});
map_canvas.on('mousemove',function(e){
if(penDown === true){
var pos = getMousePos(e);
ctx.lineTo(pos.x,pos.y);
points.push(pos);
console.log(pos);
ctx.lineWidth= 7;
ctx.lineCap ='round';
            ctx.lineJoin = "round";
ctx.strokeStyle = "rgba(0, 153, 255, 0.04)";
ctx.shadowColor = 'rgba(224,255,255,0.1)';
ctx.shadowBlur = 30;
ctx.stroke();
}
});
map_canvas.on('mouseup',function(e) {
if (penDown === true && selected == 2) {
ctx.closePath();
penDown = false;
var pos = getMousePos(e);
var thisid = map_data.maxid;
undo_stack.push({action: "line", id: thisid});
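            // Recording a new action invalidates any redo history.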
redo_stack.splice(0, redo_stack.length);
var controlpoints, quarter1, quarter2;
if($('#isStraight').bootstrapSwitch("state")){
controlpoints = [startpos,pos];
}
else{
quarter1 = points[Math.floor((points.length)/4)];
console.log("q1: " + quarter1.x + ", " + quarter1.y);
quarter2 = points[Math.floor((points.length)*3/4)];
console.log("q2: " + quarter2.x + ", " + quarter2.y);
controlpoints = get_control_points(startpos,quarter1,quarter2,pos);
}
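            // Coordinates are stored normalized to canvas size so lines
            // survive window resizes and redraw at any dimensions.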
addElement({type: "line", id: thisid,
start: {x: startpos.x / cwidth*1.0, y: startpos.y / cheight*1.0},
ctrl1: {x: controlpoints[0].x / cwidth*1.0, y:controlpoints[0].y / cheight*1.0},
ctrl2:{x:controlpoints[1].x / cwidth*1.0, y:controlpoints[1].y / cheight*1.0},
end:{x: pos.x / cwidth*1.0, y: pos.y / cheight*1.0}});
points = new Array();
map_data.maxid += 1;
}
});
var landmarkpos;
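    // The area helpers below apply the shoelace (surveyor's) formula to
    // vertex coordinates.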
//Area formulas for triangle
triAF = function(p1,p2,p3){
return Math.abs(1/2*(p1.x*p2.y+p2.x*p3.y+p3.x*p1.y - p1.y*p2.x - p2.y*p3.x - p3.y*p1.x));
}
//Area formula for quadrilaterals
quadAF = function(p1,p2,p3,p4){
return Math.abs(1/2*(p1.x*p2.y+p2.x*p3.y+p3.x*p4.y+p4.x*p1.y- p1.y*p2.x - p2.y*p3.x - p3.y*p4.x -p4.y*p1.x));
}
//shortest distance to a point formula
shrtD = function(p1,line){
return Math.abs(((line.start.y-line.end.y)/(line.end.x-line.start.x))*p1.x+p1.y+((line.start.y-line.end.y)/(line.end.x-line.start.x))*p1.x-p1.y)/(Math.sqrt(Math.pow((line.start.y-line.end.y)/(line.end.x-line.start.x),2)+1));
}
map_canvas.click(function(e) {
//Arbitrary number for rectangle approximation
var rectApprox = 20;
//Adding landmarks
if (selected == 3) {
var pos = {x: e.pageX, y: e.pageY};
var adj;
if ($("#isLandmark").bootstrapSwitch("state")) {
adj = 300;
popover = $("#landmarkpopover");
} else {
adj = 200;
popover = $("#pointpopover");
}
popover.show();
if (pos.y > 2 * cheight / 3) {
popover.css('left', (pos.x + 5) + 'px');
popover.css('top', (pos.y - adj) + 'px');
} else {
popover.css('left', (pos.x + 5) + 'px');
popover.css('top', (pos.y) + 'px');
}
landmarkpos = getMousePos(e);
}
// Selecting landmarks
if (selected == 1) {
var pagepos = {x: e.pageX, y: e.pageY};
var pos = getMousePos(e);
tochangeindex = -1;
for (var i = 0; i < map_data.landmarks.length; i++) {
var tl, br;
center = map_data.landmarks[i].pos;
tl = {x: center.x * cwidth - 25, y: center.y * cheight - 25};
br = {x: center.x * cwidth + 25, y: center.y * cheight + 38};
if (pos.x < br.x && pos.x > tl.x) {
if (pos.y < br.y && pos.y > tl.y) {
tochangeindex = i;
}
}
}
if (tochangeindex != -1) {
$("#pointpopover").hide();
$("#landmarkpopover").hide();
var popover;
if (map_data.landmarks[tochangeindex].img != "startimg" &&
map_data.landmarks[tochangeindex].img != "endimg" &&
map_data.landmarks[tochangeindex].img != "trainimg" &&
map_data.landmarks[tochangeindex].img != "busimg") {
popover = $("#landmarkpopover");
} else {
popover = $("#pointpopover");
}
popover.show();
popover.css('left', (pagepos.x + 5) + 'px');
popover.css('top', (pagepos.y) + 'px');
}
}
//Remove
if (selected == 4) {
var p = getMousePos(e);
var todelete = null;
for (var i = 0; i < map_data.landmarks.length; i++) {
var center = map_data.landmarks[i].pos;
var tl = {x: center.x * cwidth - 25, y: center.y * cheight - 25};
var br = {x: center.x * cwidth + 25, y: center.y * cheight + 38};
if (p.x < br.x && p.x > tl.x) {
if (p.y < br.y && p.y > tl.y) {
todelete = map_data.landmarks[i];
}
}
}
//If there are no landmarks to be removed then look for closest line
if (todelete == null) {
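            // No landmark was hit: sample each Bezier curve via its lookup
            // table and delete the nearest line within a 25px click radius.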
var closest = 25;
for (var i = 0; i < map_data.lines.length; i++) {
var line = map_data.lines[i];
var bez = new Bezier(line.start.x * cwidth, line.start.y * cheight,
line.ctrl1.x * cwidth, line.ctrl1.y * cheight,
line.ctrl2.x * cwidth, line.ctrl2.y * cheight,
line.end.x * cwidth, line.end.y * cheight);
var points = bez.getLUT();
for (var j = 0; j < points.length; j++) {
var dist = Math.sqrt(Math.pow(p.x - points[j].x, 2) + Math.pow(p.y - points[j].y, 2));
if (dist < closest) {
closest = dist;
todelete = line;
}
}
}
}
if (todelete != null) {
undo_stack.push({action:"delete",data:todelete});
if (todelete.type == "line") {
map_data.lines.splice(map_data.lines.indexOf(todelete),1)
}
if (todelete.type == "landmark") {
map_data.landmarks.splice(map_data.landmarks.indexOf(todelete),1)
}
update_canvas(map_data);
}
}
});
function addElement(elem) {
if (elem.type == "line") {
map_data.lines.push(elem);
}
if (elem.type == "landmark") {
map_data.landmarks.push(elem);
}
update_canvas(map_data);
}
function clear_canvas(canvas, ctx) {
ctx.clearRect(0, 0, canvas.width(), canvas.height());
}
function drawLine(line, ctx) {
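        // Stroke width and glow scale with canvas width so the drawing looks
        // consistent at any size.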
var scaling_factor = cwidth/1000;
linestartx = line.start.x * cwidth;
linestarty = line.start.y * cheight;
linectrl1x = line.ctrl1.x * cwidth;
linectrl1y = line.ctrl1.y * cheight;
linectrl2x = line.ctrl2.x * cwidth;
linectrl2y = line.ctrl2.y * cheight;
lineendx = line.end.x * cwidth;
lineendy = line.end.y * cheight;
ctx.beginPath();
ctx.moveTo(linestartx, linestarty);
ctx.bezierCurveTo(linectrl1x,linectrl1y,
linectrl2x,linectrl2y,lineendx,lineendy);
ctx.lineWidth= 1 + 6 * scaling_factor;
ctx.lineCap ='round';
        ctx.lineJoin = "round";
ctx.strokeStyle = "rgba(0, 153, 255, 0.5)";
ctx.shadowColor = 'rgba(224,255,255,1)';
ctx.shadowBlur = 10 + 20 * scaling_factor;
ctx.stroke();
ctx.closePath();
}
function drawLandmark(landmark, ctx) {
var img = document.getElementById(landmark.img);
var x = landmark.pos.x * cwidth;
var y = landmark.pos.y * cheight;
ctx.drawImage(img, x - 25, y - 25, 50, 50);
y = y + 40;
        ctx.font = '13pt Helvetica';
ctx.textAlign = 'center';
ctx.fillStyle = 'black';
ctx.fillText(landmark.landmarkname, x, y);
}
function update_canvas(obj) {
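        // Redraw the whole model from scratch, then sync the undo/redo
        // button states with their stacks.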
lines = obj.lines;
landmarks = obj.landmarks;
clear_canvas(map_canvas, ctx);
for (var i = 0; i < lines.length; i++) {
drawLine(lines[i], ctx);
}
for (var i = 0; i < landmarks.length; i++) {
drawLandmark(landmarks[i], ctx);
}
if (redo_stack.length == 0) {
$("#redobutton").attr("disabled", true);
} else {
$("#redobutton").attr("disabled", false);
}
if (undo_stack.length == 0) {
$("#undobutton").attr("disabled", true);
} else {
$("#undobutton").attr("disabled", false);
}
}
$("#submitbutton").click(function(e) {
e.preventDefault();
map_data.name = $("#titleinput").val();
$("#mapdatainput").val(JSON.stringify(map_data));
$('#titlehiddeninput').val(map_data.name);
$("#submitform").submit();
});
});
| app/assets/javascripts/create.js | // Place all the behaviors and hooks related to the matching controller here.
// All this logic will automatically be available in application.js.
$(document).ready(function() {// Javascript object to store all map data
console.log(introTrue);
$("#mobile").hide();
if( /Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent) ) {
// You are in mobile browser
$("#notinmobile").hide();
$("#mobile").show();
introTrue = false;
}
//Intro Js QuikMap creation introduction
var createguide = introJs();
createguide.setOptions({
steps: [
{
intro: 'This is the QuikMap creation page'
},
{
element:"#nameMap",
intro: "Name your map after its destination. \
This is so that other people can search for it easily"
},
{
element:"#canvas",
intro:"The map canvas"
},
{
element:"#toolbar",
intro:"The toolbar, with all the tools you need to \
create your own QuikMap. Click on them to know what they do"
},
{
intro:"Now some tips on how to draw a QuikMap"
},
{
element:"#drawbutton",
intro:"<strong>Firstly</strong>, draw the routes/paths \
from your starting point to your endpoint"
},
{
element:"#landmarkbutton",
intro:"<strong>Secondly</strong>, add in the landmarks that\
you can see as you travel along your route. Usually\
<ul>\
<li><b>3</b> per junction</li>\
<li><b>2</b> per lane</li>\
</ul>"
},
{
element:"#submitbutton",
intro:"<strong>Lastly</strong>, remember to submit your QuikMap, so that you can share it!"
}
],
doneLabel: 'Done',
tooltipPosition: 'auto',
positionPrecedence: ['left', 'right', 'bottom', 'top'],
disableInteraction: false
});
if(introTrue){
createguide.start();
}
$('#howtousebutton').click(function(){
console.log("???");
createguide.goToStep(6).start();
});
//bootstrapSwitch
$('#isStraight').bootstrapSwitch('state',false);
$('#isStraight').bootstrapSwitch("onText",'Straight');
$('#isStraight').bootstrapSwitch("offText",'Curvy');
$('#isStraight').bootstrapSwitch("onColor",'primary');
$('#isStraight').bootstrapSwitch("offColor",'info');
$('#isLandmark').bootstrapSwitch('state',false);
$('#isLandmark').bootstrapSwitch("onText",'Landmark');
$('#isLandmark').bootstrapSwitch("offText",'Points');
$('#isLandmark').bootstrapSwitch("onColor",'primary');
$('#isLandmark').bootstrapSwitch("offColor",'primary');
//map_canvas properties
var golden_ratio = 1.61803398875;
var map_canvas = $('#canvas');
var ctx = canvas.getContext("2d");
var container = $("#app"); //container of map_canvas
cwidth = container.width();
cheight = cwidth / golden_ratio;
map_canvas.attr("width",cwidth);
map_canvas.attr("height",cheight);
//canvas resizing on window resize
$(window).resize(function(){
cwidth = container.width();
cheight = cwidth/golden_ratio;
map_canvas.attr("width",cwidth);
map_canvas.attr("height",cheight);
update_canvas(map_data);
});
//map_data properties
var map_data = {name:"Untitled", maxid: 0, lines:[], landmarks: []};
var undo_stack = new Array();
var redo_stack = new Array();
var tochangeindex = null;
/**
*Toolbar controller.
*controls all buttons in toolbar
*/
var selected = 0;
$('#selectdiv').hide();
$('#drawdiv').hide();
$('#landmarkdiv').hide();
$('#removediv').hide();
$('#undobutton').attr("disabled", true);
$('#redobutton').attr("disabled", true);
$('#selectbutton').click( function() {
selected = 1;
});
$('#drawbutton').click( function() {
selected = 2;
});
$('#landmarkbutton').click( function() {
selected = 3;
});
$('#removebutton').click( function() {
selected = 4;
});
$('#undobutton').click( function() {
selected = 0;
var actiontoundo = undo_stack.pop();
if (actiontoundo.action == "line") {
var line_data;
var to_remove = 0;
for (i = 0; i < map_data.lines.length; i++) {
if (map_data.lines[i].id == actiontoundo.id) {
line_data = map_data.lines[i];
to_remove = i;
break;
}
}
map_data.lines.splice(to_remove, 1);
redo_stack.push({action: "line", data: line_data});
}
if (actiontoundo.action == "landmark") {
var landmarkdata;
var to_remove = 0;
for (i = 0; i < map_data.landmarks.length; i++) {
if (map_data.landmarks[i].id == actiontoundo.id) {
landmarkdata = map_data.landmarks[i];
to_remove = i;
break;
}
}
map_data.landmarks.splice(to_remove, 1);
redo_stack.push({action: "landmark", data: landmarkdata});
}
if (actiontoundo.action == "delete") {
var datatoaddback = actiontoundo.data;
if (datatoaddback.type == "line") {
map_data.lines.push(datatoaddback);
}
else if (datatoaddback.type == "landmark") {
map_data.landmarks.push(datatoaddback);
}
redo_stack.push({action:"delete",data: datatoaddback});
}
if (actiontoundo.action == "changelandmark") {
for (i = 0; i < map_data.landmarks.length; i++) {
if (map_data.landmarks[i].id == actiontoundo.id) {
var newlandmarkname = map_data.landmarks[i].landmarkname;
var newlandmarkimg = map_data.landmarks[i].img;
map_data.landmarks[i].landmarkname = actiontoundo.oldname;
map_data.landmarks[i].img = actiontoundo.oldimg;
redo_stack.push({action: "changelandmark", id: actiontoundo.id, newname: newlandmarkname, newimg: newlandmarkimg});
break;
}
}
}
update_canvas(map_data);
});
$('#redobutton').click( function() {
selected = 0;
var actiontoredo = redo_stack.pop();
if (actiontoredo.action == "line") {
var thisid = actiontoredo.data.id;
undo_stack.push({action:"line", id:thisid});
map_data.lines.push(actiontoredo.data);
}
if (actiontoredo.action == "landmark") {
var thisid = actiontoredo.data.id;
undo_stack.push({action:"landmark", id:thisid});
map_data.landmarks.push(actiontoredo.data);
}
if (actiontoredo.action == "delete") {
todelete = actiontoredo.data;
undo_stack.push({action:"delete",data:todelete});
if (todelete.type == "line") {
map_data.lines.splice(map_data.lines.indexOf(todelete),1);
}
if (todelete.type == "landmark") {
map_data.landmarks.splice(map_data.landmarks.indexOf(todelete),1);
}
}
if (actiontoredo.action == "changelandmark") {
for (i = 0; i < map_data.landmarks.length; i++) {
if (map_data.landmarks[i].id == actiontoredo.id) {
var oldname = map_data.landmarks[i].landmarkname;
var oldimg = map_data.landmarks[i].img;
map_data.landmarks[i].landmarkname = actiontoredo.newname;
map_data.landmarks[i].img = actiontoredo.newimg;
undo_stack.push({action: "changelandmark", id: actiontoredo.id, oldname: oldname, oldimg: oldimg});
break;
}
}
}
update_canvas(map_data);
});
$('#toolbar').click( function() {
$('#selectbutton').attr("disabled", false);
$('#drawbutton').attr("disabled", false);
$('#landmarkbutton').attr("disabled", false);
$('#removebutton').attr("disabled", false);
$('#selectdiv').hide();
$('#drawdiv').hide();
$('#landmarkdiv').hide();
$('#removediv').hide();
switch (selected) {
case 1:
$('#selectbutton').attr("disabled", true);
$('#selectdiv').show();
break;
case 2:
$('#drawbutton').attr("disabled", true);
$('#drawdiv').show();
break;
case 3:
$('#landmarkbutton').attr("disabled", true);
$('#landmarkdiv').show();
break;
case 4:
$('#removebutton').attr("disabled", true);
$('#removediv').show();
break;
}
});
/**
* Toolbar button code ends here
*/
/**
* Landmark popup code
*/
$(document).click( function() {
if (selected != 3 && selected != 1) {
$("#pointpopover").hide();
$("#landmarkpopover").hide();
} else if (selected == 3){
if (!$("#isLandmark").bootstrapSwitch("state")) {
$("#landmarkpopover").hide();
} else {
$("#pointpopover").hide();
}
}
});
function createLandmark(img, name) {
if (selected == 1) {
// edit landmark
var oldname = map_data.landmarks[tochangeindex].landmarkname;
var oldimg = map_data.landmarks[tochangeindex].img;
map_data.landmarks[tochangeindex].landmarkname = name;
map_data.landmarks[tochangeindex].img = img;
undo_stack.push({action: "changelandmark", id: map_data.landmarks[tochangeindex].id, oldname: oldname, oldimg: oldimg});
redo_stack.splice(0, redo_stack.length);
update_canvas(map_data);
} else { // new landmark
var thisid = map_data.maxid;
map_data.maxid += 1;
undo_stack.push({action: "landmark", id: thisid});
redo_stack.splice(0, redo_stack.length);
landmarkpos = {x: landmarkpos.x / cwidth*1.0, y: landmarkpos.y / cheight*1.0};
addElement({type: "landmark", id: thisid, landmarkname: name, img: img, pos: landmarkpos});
}
}
$("#startbutton").click( function() {
name = $("#pointinput").val();
if (name == "") {
name = "Start";
}
createLandmark("startimg", name);
$("#pointinput").val("");
$("#pointpopover").hide();
});
$("#endbutton").click( function() {
name = $("#pointinput").val();
if (name == "") {
name = "End";
}
createLandmark("endimg", name);
$("#pointinput").val("");
$("#pointpopover").hide();
});
$("#trainbutton").click( function() {
name = $("#pointinput").val();
if (name == "") {
name = "Station";
}
createLandmark("trainimg", name);
$("#pointinput").val("");
$("#pointpopover").hide();
});
$("#busbutton").click( function() {
name = $("#pointinput").val();
if (name == "") {
name = "Bus Stop";
}
createLandmark("busimg", name);
$("#pointinput").val("");
$("#pointpopover").hide();
});
$('#digitalbutton').click( function() {
name = $("#landmarkinput").val();
if (name == "") {
name = "Digital";
}
createLandmark("digitalimg", name);
$("#landmarkinput").val("");
$("#landmarkpopover").hide();
});
$('#lifestylebutton').click( function() {
name = $("#landmarkinput").val();
if (name == "") {
name = "Lifestyle";
}
createLandmark("lifestyleimg", name);
$("#landmarkinput").val("");
$("#landmarkpopover").hide();
});
$('#foodbutton').click( function() {
name = $("#landmarkinput").val();
if (name == "") {
name = "F&B";
}
createLandmark("foodimg", name);
$("#landmarkinput").val("");
$("#landmarkpopover").hide();
});
$('#fashionbutton').click( function() {
name = $("#landmarkinput").val();
if (name == "") {
name = "Fashion";
}
createLandmark("fashionimg", name);
$("#landmarkinput").val("");
$("#landmarkpopover").hide();
});
$('#servicesbutton').click( function() {
name = $("#landmarkinput").val();
if (name == "") {
name = "Service";
}
createLandmark("servicesimg", name);
$("#landmarkinput").val("");
$("#landmarkpopover").hide();
});
$('#structuresbutton').click( function() {
name = $("#landmarkinput").val();
if (name == "") {
name = "Structure";
}
createLandmark("structuresimg", name);
$("#landmarkinput").val("");
$("#landmarkpopover").hide();
});
/**
* Landmark popup code ends here
*/
// Canvas Manipulation
var penDown = false;
var startpos;
var points = new Array();
function getMousePos(e) {
return {
x: e.offsetX,
y: e.offsetY
};
}
function get_control_points(start,q1,q2,end){
//Magic number, with golden ratio!
extend_coefficient = 0.5*golden_ratio;
function form_equation(q1,q2){
function equation(x){
gradient = (q2.y - q1.y)/(q2.x-q1.x);
c = q1.y - gradient*q1.x;
return gradient*x + c;
}
return equation;
}
var control1 = {x:0,y:0};
var control2 = {x:0,y:0};
control1.x = Math.floor(q1.x + extend_coefficient*(q1.x-start.x));
control2.x = Math.floor(q2.x - extend_coefficient*(end.x-q2.x));
control1.y = Math.floor(form_equation(start,q1)(control1.x));
control2.y = Math.floor(form_equation(q2,end)(control2.x));
return [control1,control2];
}
map_canvas.on('mousedown',function(e) {
if (penDown === false && selected == 2) {
penDown = true;
var pos = getMousePos(e);
startpos = pos;
ctx.beginPath();
ctx.moveTo(pos.x,pos.y);
}
});
map_canvas.on('mousemove',function(e){
if(penDown === true){
var pos = getMousePos(e);
ctx.lineTo(pos.x,pos.y);
points.push(pos);
ctx.lineWidth= 7;
ctx.lineCap ='round';
            ctx.lineJoin = "round";
ctx.strokeStyle = "rgba(0, 153, 255, 0.04)";
ctx.shadowColor = 'rgba(224,255,255,0.1)';
ctx.shadowBlur = 30;
ctx.stroke();
}
});
map_canvas.on('mouseup',function(e) {
if (penDown === true && selected == 2) {
ctx.closePath();
penDown = false;
var pos = getMousePos(e);
var thisid = map_data.maxid;
undo_stack.push({action: "line", id: thisid});
redo_stack.splice(0, redo_stack.length);
var controlpoints, quarter1, quarter2;
if($('#isStraight').bootstrapSwitch("state")){
controlpoints = [startpos,pos];
}
else{
quarter1 = points[Math.floor((points.length)/4)];
quarter2 = points[Math.floor((points.length)*3/4)];
controlpoints = get_control_points(startpos,quarter1,quarter2,pos);
}
addElement({type: "line", id: thisid,
start: {x: startpos.x / cwidth*1.0, y: startpos.y / cheight*1.0},
ctrl1: {x: controlpoints[0].x / cwidth*1.0, y:controlpoints[0].y / cheight*1.0},
ctrl2:{x:controlpoints[1].x / cwidth*1.0, y:controlpoints[1].y / cheight*1.0},
end:{x: pos.x / cwidth*1.0, y: pos.y / cheight*1.0}});
points = new Array();
map_data.maxid += 1;
}
});
var landmarkpos;
//Area formulas for triangle
triAF = function(p1,p2,p3){
return Math.abs(1/2*(p1.x*p2.y+p2.x*p3.y+p3.x*p1.y - p1.y*p2.x - p2.y*p3.x - p3.y*p1.x));
}
//Area formula for quadrilaterals
quadAF = function(p1,p2,p3,p4){
return Math.abs(1/2*(p1.x*p2.y+p2.x*p3.y+p3.x*p4.y+p4.x*p1.y- p1.y*p2.x - p2.y*p3.x - p3.y*p4.x -p4.y*p1.x));
}
//shortest distance to a point formula
shrtD = function(p1,line){
return Math.abs(((line.start.y-line.end.y)/(line.end.x-line.start.x))*p1.x+p1.y+((line.start.y-line.end.y)/(line.end.x-line.start.x))*p1.x-p1.y)/(Math.sqrt(Math.pow((line.start.y-line.end.y)/(line.end.x-line.start.x),2)+1));
}
map_canvas.click(function(e) {
//Arbitrary number for rectangle approximation
var rectApprox = 20;
//Adding landmarks
if (selected == 3) {
var pos = {x: e.pageX, y: e.pageY};
var adj;
if ($("#isLandmark").bootstrapSwitch("state")) {
adj = 300;
popover = $("#landmarkpopover");
} else {
adj = 200;
popover = $("#pointpopover");
}
popover.show();
if (pos.y > 2 * cheight / 3) {
popover.css('left', (pos.x + 5) + 'px');
popover.css('top', (pos.y - adj) + 'px');
} else {
popover.css('left', (pos.x + 5) + 'px');
popover.css('top', (pos.y) + 'px');
}
landmarkpos = getMousePos(e);
}
// Selecting landmarks
if (selected == 1) {
var pagepos = {x: e.pageX, y: e.pageY};
var pos = getMousePos(e);
tochangeindex = -1;
for (var i = 0; i < map_data.landmarks.length; i++) {
var tl, br;
center = map_data.landmarks[i].pos;
tl = {x: center.x * cwidth - 25, y: center.y * cheight - 25};
br = {x: center.x * cwidth + 25, y: center.y * cheight + 38};
if (pos.x < br.x && pos.x > tl.x) {
if (pos.y < br.y && pos.y > tl.y) {
tochangeindex = i;
}
}
}
if (tochangeindex != -1) {
$("#pointpopover").hide();
$("#landmarkpopover").hide();
var popover;
if (map_data.landmarks[tochangeindex].img != "startimg" &&
map_data.landmarks[tochangeindex].img != "endimg" &&
map_data.landmarks[tochangeindex].img != "trainimg" &&
map_data.landmarks[tochangeindex].img != "busimg") {
popover = $("#landmarkpopover");
} else {
popover = $("#pointpopover");
}
popover.show();
popover.css('left', (pagepos.x + 5) + 'px');
popover.css('top', (pagepos.y) + 'px');
}
}
//Remove
if (selected == 4) {
var p = getMousePos(e);
var todelete = null;
for (var i = 0; i < map_data.landmarks.length; i++) {
var center = map_data.landmarks[i].pos;
var tl = {x: center.x * cwidth - 25, y: center.y * cheight - 25};
var br = {x: center.x * cwidth + 25, y: center.y * cheight + 38};
if (p.x < br.x && p.x > tl.x) {
if (p.y < br.y && p.y > tl.y) {
todelete = map_data.landmarks[i];
}
}
}
//If there are no landmarks to be removed then look for closest line
if (todelete == null) {
var closest = 25;
for (var i = 0; i < map_data.lines.length; i++) {
var line = map_data.lines[i];
var bez = new Bezier(line.start.x * cwidth, line.start.y * cheight,
line.ctrl1.x * cwidth, line.ctrl1.y * cheight,
line.ctrl2.x * cwidth, line.ctrl2.y * cheight,
line.end.x * cwidth, line.end.y * cheight);
var points = bez.getLUT();
for (var j = 0; j < points.length; j++) {
var dist = Math.sqrt(Math.pow(p.x - points[j].x, 2) + Math.pow(p.y - points[j].y, 2));
if (dist < closest) {
closest = dist;
todelete = line;
}
}
}
}
if (todelete != null) {
undo_stack.push({action:"delete",data:todelete});
if (todelete.type == "line") {
map_data.lines.splice(map_data.lines.indexOf(todelete),1)
}
if (todelete.type == "landmark") {
map_data.landmarks.splice(map_data.landmarks.indexOf(todelete),1)
}
update_canvas(map_data);
}
}
});
function addElement(elem) {
if (elem.type == "line") {
map_data.lines.push(elem);
}
if (elem.type == "landmark") {
map_data.landmarks.push(elem);
}
update_canvas(map_data);
}
function clear_canvas(canvas, ctx) {
ctx.clearRect(0, 0, canvas.width(), canvas.height());
}
function drawLine(line, ctx) {
var scaling_factor = cwidth/1000;
linestartx = line.start.x * cwidth;
linestarty = line.start.y * cheight;
linectrl1x = line.ctrl1.x * cwidth;
linectrl1y = line.ctrl1.y * cheight;
linectrl2x = line.ctrl2.x * cwidth;
linectrl2y = line.ctrl2.y * cheight;
lineendx = line.end.x * cwidth;
lineendy = line.end.y * cheight;
ctx.beginPath();
ctx.moveTo(linestartx, linestarty);
ctx.bezierCurveTo(linectrl1x,linectrl1y,
linectrl2x,linectrl2y,lineendx,lineendy);
ctx.lineWidth= 1 + 6 * scaling_factor;
ctx.lineCap ='round';
        ctx.lineJoin = "round";
ctx.strokeStyle = "rgba(0, 153, 255, 0.5)";
ctx.shadowColor = 'rgba(224,255,255,1)';
ctx.shadowBlur = 10 + 20 * scaling_factor;
ctx.stroke();
ctx.closePath();
}
function drawLandmark(landmark, ctx) {
var img = document.getElementById(landmark.img);
var x = landmark.pos.x * cwidth;
var y = landmark.pos.y * cheight;
ctx.drawImage(img, x - 25, y - 25, 50, 50);
y = y + 40;
        ctx.font = '13pt Helvetica';
ctx.textAlign = 'center';
ctx.fillStyle = 'black';
ctx.fillText(landmark.landmarkname, x, y);
}
function update_canvas(obj) {
lines = obj.lines;
landmarks = obj.landmarks;
clear_canvas(map_canvas, ctx);
for (var i = 0; i < lines.length; i++) {
drawLine(lines[i], ctx);
}
for (var i = 0; i < landmarks.length; i++) {
drawLandmark(landmarks[i], ctx);
}
if (redo_stack.length == 0) {
$("#redobutton").attr("disabled", true);
} else {
$("#redobutton").attr("disabled", false);
}
if (undo_stack.length == 0) {
$("#undobutton").attr("disabled", true);
} else {
$("#undobutton").attr("disabled", false);
}
}
$("#submitbutton").click(function(e) {
e.preventDefault();
map_data.name = $("#titleinput").val();
$("#mapdatainput").val(JSON.stringify(map_data));
$('#titlehiddeninput').val(map_data.name);
$("#submitform").submit();
});
});
| Fixes division by zero error
| app/assets/javascripts/create.js | Fixes division by zero error | <ide><path>pp/assets/javascripts/create.js
<ide> function form_equation(q1,q2){
<ide>
<ide> function equation(x){
<del> gradient = (q2.y - q1.y)/(q2.x-q1.x);
<add> var REALLYFUCKINGHIGHVALUE = 9999999999999;
<add> if(q1.x == q2.x) {
<add> gradient = REALLYFUCKINGHIGHVALUE;
<add> } else {
<add> gradient = (q2.y - q1.y)/(q2.x - q1.x);
<add> }
<ide> c = q1.y - gradient*q1.x;
<ide> return gradient*x + c;
<ide> }
<ide> ctx.lineTo(pos.x,pos.y);
<ide>
<ide> points.push(pos);
<add> console.log(pos);
<ide>
<ide> ctx.lineWidth= 7;
<ide> ctx.lineCap ='round';
<ide>
<ide> }
<ide> else{
<del> quarter1 = points[Math.floor((points.length)/4)];
<del> quarter2 = points[Math.floor((points.length)*3/4)];
<del>
<del> controlpoints = get_control_points(startpos,quarter1,quarter2,pos);
<add>
<add> quarter1 = points[Math.floor((points.length)/4)];
<add> console.log("q1: " + quarter1.x + ", " + quarter1.y);
<add> quarter2 = points[Math.floor((points.length)*3/4)];
<add> console.log("q2: " + quarter2.x + ", " + quarter2.y);
<add>
<add> controlpoints = get_control_points(startpos,quarter1,quarter2,pos);
<ide> }
<ide>
<ide> addElement({type: "line", id: thisid, |
|
Java | agpl-3.0 | 1767125b46374ce9db8def23c76e54b19a6746e6 | 0 | cojen/Tupl | /*
* Copyright 2011-2012 Brian S O'Neill
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cojen.tupl;
import java.io.EOFException;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.Random;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.FileChannel;
import java.security.GeneralSecurityException;
/**
*
*
* @author Brian S O'Neill
*/
final class RedoLog extends CauseCloseable implements Checkpointer.Shutdown {
private static final long MAGIC_NUMBER = 431399725605778814L;
private static final int ENCODING_VERSION = 20120801;
private static final byte
/** timestamp: long */
OP_TIMESTAMP = 1,
/** timestamp: long */
OP_SHUTDOWN = 2,
/** timestamp: long */
OP_CLOSE = 3,
/** timestamp: long */
OP_END_FILE = 4,
/** txnId: long */
//OP_TXN_BEGIN = 5,
/** txnId: long, parentTxnId: long */
//OP_TXN_BEGIN_CHILD = 6,
/** txnId: long */
//OP_TXN_CONTINUE = 7,
/** txnId: long, parentTxnId: long */
//OP_TXN_CONTINUE_CHILD = 8,
/** txnId: long */
OP_TXN_ROLLBACK = 9,
/** txnId: long, parentTxnId: long */
OP_TXN_ROLLBACK_CHILD = 10,
/** txnId: long */
OP_TXN_COMMIT = 11,
/** txnId: long, parentTxnId: long */
OP_TXN_COMMIT_CHILD = 12,
/** indexId: long, keyLength: varInt, key: bytes, valueLength: varInt, value: bytes */
OP_STORE = 16,
/** indexId: long, keyLength: varInt, key: bytes */
OP_DELETE = 17,
/** indexId: long */
//OP_CLEAR = 18,
/** txnId: long, indexId: long, keyLength: varInt, key: bytes,
valueLength: varInt, value: bytes */
OP_TXN_STORE = 19,
/** txnId: long, indexId: long, keyLength: varInt, key: bytes,
valueLength: varInt, value: bytes */
OP_TXN_STORE_COMMIT = 20,
/** txnId: long, parentTxnId: long, indexId: long, keyLength: varInt, key: bytes,
valueLength: varInt, value: bytes */
OP_TXN_STORE_COMMIT_CHILD = 21,
/** txnId: long, indexId: long, keyLength: varInt, key: bytes */
OP_TXN_DELETE = 22,
/** txnId: long, indexId: long, keyLength: varInt, key: bytes */
OP_TXN_DELETE_COMMIT = 23,
/** txnId: long, parentTxnId: long, indexId: long, keyLength: varInt, key: bytes */
OP_TXN_DELETE_COMMIT_CHILD = 24;
/** txnId: long, indexId: long */
//OP_TXN_CLEAR = 25,
/** txnId: long, indexId: long */
//OP_TXN_CLEAR_COMMIT = 26,
/** txnId: long, parentTxnId: long, indexId: long */
//OP_TXN_CLEAR_COMMIT_CHILD = 27,
/** length: varInt, data: bytes */
//OP_CUSTOM = (byte) 128,
/** txnId: long, length: varInt, data: bytes */
//OP_TXN_CUSTOM = (byte) 129;
private static int randomInt() {
Random rnd = new Random();
int x;
// Cannot return zero, since it breaks Xorshift RNG.
while ((x = rnd.nextInt()) == 0);
return x;
}
private final Crypto mCrypto;
private final File mBaseFile;
private final byte[] mBuffer;
private int mBufferPos;
private final boolean mReplayMode;
private long mLogId;
private OutputStream mOut;
private volatile FileChannel mChannel;
private boolean mAlwaysFlush;
private int mTermRndSeed;
private volatile Throwable mCause;
/**
     * @param logId first log id to open
*/
RedoLog(Crypto crypto, File baseFile, long logId, boolean replay) throws IOException {
mCrypto = crypto;
mBaseFile = baseFile;
mBuffer = new byte[4096];
mReplayMode = replay;
synchronized (this) {
mLogId = logId;
if (!replay) {
openNewFile(logId);
}
}
}
synchronized long logId() {
return mLogId;
}
/**
* @param scanned files scanned in previous replay
* @return all the files which were replayed
*/
synchronized Set<File> replay(RedoLogVisitor visitor, Set<File> scanned,
EventListener listener, EventType type, String message)
throws IOException
{
if (!mReplayMode || mBaseFile == null) {
throw new IllegalStateException();
}
try {
Set<File> files = new LinkedHashSet<File>(2);
while (true) {
File file = fileFor(mBaseFile, mLogId);
if (scanned != null && !scanned.contains(file)) {
break;
}
InputStream in;
try {
in = new FileInputStream(file);
} catch (FileNotFoundException e) {
break;
}
try {
if (mCrypto != null) {
try {
in = mCrypto.newDecryptingStream(mLogId, in);
} catch (IOException e) {
throw e;
} catch (Exception e) {
throw new DatabaseException(e);
}
}
if (listener != null) {
listener.notify(type, message, mLogId);
}
files.add(file);
replay(new DataIn(in), visitor);
} catch (EOFException e) {
// End of log didn't get completely flushed.
} finally {
Utils.closeQuietly(null, in);
}
mLogId++;
}
return files;
} catch (IOException e) {
throw Utils.rethrow(e, mCause);
}
}
static void deleteOldFile(File baseFile, long logId) {
fileFor(baseFile, logId).delete();
}
void deleteOldFile(long logId) {
deleteOldFile(mBaseFile, logId);
}
/**
* @return old log file id, which is one less than new one
*/
long openNewFile() throws IOException {
if (mReplayMode) {
throw new IllegalStateException();
}
final long oldLogId;
synchronized (this) {
oldLogId = mLogId;
}
openNewFile(oldLogId + 1);
return oldLogId;
}
private void openNewFile(long logId) throws IOException {
final File file = fileFor(mBaseFile, logId);
if (file.exists()) {
throw new FileNotFoundException("Log file already exists: " + file.getPath());
}
final OutputStream out;
final FileChannel channel;
final int termRndSeed;
{
FileOutputStream fout = new FileOutputStream(file);
channel = fout.getChannel();
if (mCrypto == null) {
out = fout;
} else {
try {
out = mCrypto.newEncryptingStream(logId, fout);
} catch (GeneralSecurityException e) {
throw new DatabaseException(e);
}
}
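            // File header: magic number, encoding version, log id, and the
            // seed for the terminator RNG.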
byte[] buf = new byte[8 + 4 + 8 + 4];
int offset = 0;
Utils.writeLongLE(buf, offset, MAGIC_NUMBER); offset += 8;
Utils.writeIntLE(buf, offset, ENCODING_VERSION); offset += 4;
Utils.writeLongLE(buf, offset, logId); offset += 8;
Utils.writeIntLE(buf, offset, termRndSeed = randomInt()); offset += 4;
if (offset != buf.length) {
throw new AssertionError();
}
try {
out.write(buf);
} catch (IOException e) {
Utils.closeQuietly(null, out);
file.delete();
throw e;
}
}
final OutputStream oldOut;
final FileChannel oldChannel;
synchronized (this) {
oldOut = mOut;
oldChannel = mChannel;
if (oldOut != null) {
writeOp(OP_END_FILE, System.currentTimeMillis());
writeTerminator();
doFlush();
}
mOut = out;
mChannel = channel;
mTermRndSeed = termRndSeed;
mLogId = logId;
timestamp();
}
try {
if (oldChannel != null) {
oldChannel.force(true);
}
} catch (IOException e) {
// Ignore.
}
Utils.closeQuietly(null, oldOut);
}
/**
* @return null if non-durable
*/
private static File fileFor(File base, long logId) {
return base == null ? null : new File(base.getPath() + ".redo." + logId);
}
public long size() throws IOException {
FileChannel channel = mChannel;
return channel == null ? 0 : channel.size();
}
public synchronized void flush() throws IOException {
doFlush();
}
public void sync() throws IOException {
flush();
force(false);
}
private void force(boolean metadata) throws IOException {
FileChannel channel = mChannel;
if (channel != null) {
try {
channel.force(metadata);
} catch (ClosedChannelException e) {
} catch (IOException e) {
throw Utils.rethrow(e, mCause);
}
}
}
@Override
public synchronized void close() throws IOException {
close(null);
}
@Override
public synchronized void close(Throwable cause) throws IOException {
if (cause != null) {
mCause = cause;
}
shutdown(OP_CLOSE);
}
@Override
public void shutdown() {
try {
shutdown(OP_SHUTDOWN);
} catch (IOException e) {
// Ignore.
}
}
void shutdown(byte op) throws IOException {
synchronized (this) {
mAlwaysFlush = true;
if (mChannel == null || !mChannel.isOpen()) {
return;
}
writeOp(op, System.currentTimeMillis());
writeTerminator();
doFlush();
if (op == OP_CLOSE) {
mChannel.force(true);
mChannel.close();
return;
}
}
force(true);
}
public void store(long indexId, byte[] key, byte[] value, DurabilityMode mode)
throws IOException
{
if (key == null) {
throw new NullPointerException("Key is null");
}
boolean sync;
synchronized (this) {
if (value == null) {
writeOp(OP_DELETE, indexId);
writeUnsignedVarInt(key.length);
writeBytes(key);
} else {
writeOp(OP_STORE, indexId);
writeUnsignedVarInt(key.length);
writeBytes(key);
writeUnsignedVarInt(value.length);
writeBytes(value);
}
writeTerminator();
sync = conditionalFlush(mode);
}
if (sync) {
force(false);
}
}
public synchronized void txnRollback(long txnId, long parentTxnId) throws IOException {
if (parentTxnId == 0) {
writeOp(OP_TXN_ROLLBACK, txnId);
} else {
writeOp(OP_TXN_ROLLBACK_CHILD, txnId);
writeLongLE(parentTxnId);
}
writeTerminator();
}
/**
* @return true if caller should call txnCommitSync
*/
public synchronized boolean txnCommitFull(long txnId, DurabilityMode mode) throws IOException {
writeOp(OP_TXN_COMMIT, txnId);
writeTerminator();
return conditionalFlush(mode);
}
/**
* Called after txnCommitFull.
*/
public void txnCommitSync() throws IOException {
force(false);
}
public synchronized void txnCommitScope(long txnId, long parentTxnId) throws IOException {
writeOp(OP_TXN_COMMIT_CHILD, txnId);
writeLongLE(parentTxnId);
writeTerminator();
}
public synchronized void txnStore(long txnId, long indexId, byte[] key, byte[] value)
throws IOException
{
if (key == null) {
throw new NullPointerException("Key is null");
}
if (value == null) {
writeOp(OP_TXN_DELETE, txnId);
writeLongLE(indexId);
writeUnsignedVarInt(key.length);
writeBytes(key);
} else {
writeOp(OP_TXN_STORE, txnId);
writeLongLE(indexId);
writeUnsignedVarInt(key.length);
writeBytes(key);
writeUnsignedVarInt(value.length);
writeBytes(value);
}
writeTerminator();
}
/*
public synchronized void txnStoreCommit(long txnId, long parentTxnId,
long indexId, byte[] key, byte[] value)
throws IOException
{
if (key == null) {
throw new NullPointerException("Key is null");
}
if (value == null) {
if (parentTxnId == 0) {
writeOp(OP_TXN_DELETE_COMMIT, txnId);
} else {
writeOp(OP_TXN_DELETE_COMMIT_CHILD, txnId);
writeLongLE(parentTxnId);
}
writeLongLE(indexId);
writeUnsignedVarInt(key.length);
writeBytes(key);
} else {
if (parentTxnId == 0) {
writeOp(OP_TXN_STORE_COMMIT, txnId);
} else {
writeOp(OP_TXN_STORE_COMMIT_CHILD, txnId);
writeLongLE(parentTxnId);
}
writeLongLE(indexId);
writeUnsignedVarInt(key.length);
writeBytes(key);
writeUnsignedVarInt(value.length);
writeBytes(value);
}
writeTerminator();
}
*/
synchronized void timestamp() throws IOException {
writeOp(OP_TIMESTAMP, System.currentTimeMillis());
writeTerminator();
}
// Caller must be synchronized.
private void writeIntLE(int v) throws IOException {
byte[] buffer = mBuffer;
int pos = mBufferPos;
if (pos > buffer.length - 4) {
doFlush(buffer, pos);
pos = 0;
}
Utils.writeIntLE(buffer, pos, v);
mBufferPos = pos + 4;
}
// Caller must be synchronized.
private void writeLongLE(long v) throws IOException {
byte[] buffer = mBuffer;
int pos = mBufferPos;
if (pos > buffer.length - 8) {
doFlush(buffer, pos);
pos = 0;
}
Utils.writeLongLE(buffer, pos, v);
mBufferPos = pos + 8;
}
// Caller must be synchronized.
private void writeOp(byte op, long operand) throws IOException {
byte[] buffer = mBuffer;
int pos = mBufferPos;
if (pos >= buffer.length - 9) {
doFlush(buffer, pos);
pos = 0;
}
buffer[pos] = op;
Utils.writeLongLE(buffer, pos + 1, operand);
mBufferPos = pos + 9;
}
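    // Every logged operation is followed by a terminator drawn from a seeded
    // Xorshift stream; replay regenerates the same stream, so a mismatch
    // marks the point where a partially flushed (torn) write begins.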
// Caller must be synchronized.
private void writeTerminator() throws IOException {
writeIntLE(nextTermRnd());
}
// Caller must be synchronized (replay is exempt)
private int nextTermRnd() throws IOException {
// Xorshift RNG by George Marsaglia.
int x = mTermRndSeed;
x ^= x << 13;
x ^= x >>> 17;
x ^= x << 5;
mTermRndSeed = x;
return x;
}
// Caller must be synchronized.
private void writeUnsignedVarInt(int v) throws IOException {
byte[] buffer = mBuffer;
int pos = mBufferPos;
if (pos > buffer.length - 5) {
doFlush(buffer, pos);
pos = 0;
}
mBufferPos = Utils.writeUnsignedVarInt(buffer, pos, v);
}
// Caller must be synchronized.
private void writeBytes(byte[] bytes) throws IOException {
writeBytes(bytes, 0, bytes.length);
}
// Caller must be synchronized.
private void writeBytes(byte[] bytes, int offset, int length) throws IOException {
if (length == 0) {
return;
}
byte[] buffer = mBuffer;
int pos = mBufferPos;
while (true) {
if (pos <= buffer.length - length) {
System.arraycopy(bytes, offset, buffer, pos, length);
mBufferPos = pos + length;
return;
}
int remaining = buffer.length - pos;
System.arraycopy(bytes, offset, buffer, pos, remaining);
doFlush(buffer, buffer.length);
pos = 0;
offset += remaining;
length -= remaining;
}
}
// Caller must be synchronized. Returns true if caller should sync.
private boolean conditionalFlush(DurabilityMode mode) throws IOException {
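        // SYNC: flush and tell the caller to force. NO_SYNC: flush only.
        // NO_FLUSH: defer unless shutdown has already set mAlwaysFlush.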
switch (mode) {
default:
return false;
case NO_FLUSH:
if (mAlwaysFlush) {
doFlush();
}
return false;
case SYNC:
doFlush();
return true;
case NO_SYNC:
doFlush();
return false;
}
}
// Caller must be synchronized.
private void doFlush() throws IOException {
doFlush(mBuffer, mBufferPos);
}
// Caller must be synchronized.
private void doFlush(byte[] buffer, int pos) throws IOException {
try {
mOut.write(buffer, 0, pos);
mBufferPos = 0;
} catch (IOException e) {
throw Utils.rethrow(e, mCause);
}
}
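    // Replay validates the header (magic, version, expected log id), re-seeds
    // the terminator RNG, then applies operations until EOF or the first bad
    // terminator, which is treated as an incomplete final flush.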
private void replay(DataIn in, RedoLogVisitor visitor) throws IOException {
long magic = in.readLongLE();
if (magic != MAGIC_NUMBER) {
if (magic == 0) {
// Assume file was flushed improperly and discard it.
return;
}
throw new DatabaseException("Incorrect magic number in redo log file");
}
int version = in.readIntLE();
if (version != ENCODING_VERSION) {
throw new DatabaseException("Unsupported redo log encoding version: " + version);
}
long id = in.readLongLE();
if (id != mLogId) {
throw new DatabaseException
("Expected redo log identifier of " + mLogId + ", but actual is: " + id);
}
mTermRndSeed = in.readIntLE();
int op;
while ((op = in.read()) >= 0) {
long operand = in.readLongLE();
switch (op &= 0xff) {
default:
throw new DatabaseException("Unknown redo log operation: " + op);
case 0:
// Assume redo log did not flush completely.
return;
case OP_TIMESTAMP:
if (!verifyTerminator(in)) {
return;
}
visitor.timestamp(operand);
break;
case OP_SHUTDOWN:
if (!verifyTerminator(in)) {
return;
}
visitor.shutdown(operand);
break;
case OP_CLOSE:
if (!verifyTerminator(in)) {
return;
}
visitor.close(operand);
break;
case OP_END_FILE:
if (!verifyTerminator(in)) {
return;
}
visitor.endFile(operand);
break;
case OP_TXN_ROLLBACK:
if (!verifyTerminator(in)) {
return;
}
visitor.txnRollback(operand, 0);
break;
case OP_TXN_ROLLBACK_CHILD:
long parentTxnId = in.readLongLE();
if (!verifyTerminator(in)) {
return;
}
visitor.txnRollback(operand, parentTxnId);
break;
case OP_TXN_COMMIT:
if (!verifyTerminator(in)) {
return;
}
visitor.txnCommit(operand, 0);
break;
case OP_TXN_COMMIT_CHILD:
parentTxnId = in.readLongLE();
if (!verifyTerminator(in)) {
return;
}
visitor.txnCommit(operand, parentTxnId);
break;
case OP_STORE:
byte[] key = in.readBytes();
byte[] value = in.readBytes();
if (!verifyTerminator(in)) {
return;
}
visitor.store(operand, key, value);
break;
case OP_DELETE:
key = in.readBytes();
if (!verifyTerminator(in)) {
return;
}
visitor.store(operand, key, null);
break;
case OP_TXN_STORE:
long indexId = in.readLongLE();
key = in.readBytes();
value = in.readBytes();
if (!verifyTerminator(in)) {
return;
}
visitor.txnStore(operand, indexId, key, value);
break;
case OP_TXN_STORE_COMMIT:
indexId = in.readLongLE();
key = in.readBytes();
value = in.readBytes();
if (!verifyTerminator(in)) {
return;
}
visitor.txnStore(operand, indexId, key, value);
visitor.txnCommit(operand, 0);
break;
case OP_TXN_STORE_COMMIT_CHILD:
parentTxnId = in.readLongLE();
indexId = in.readLongLE();
key = in.readBytes();
value = in.readBytes();
if (!verifyTerminator(in)) {
return;
}
visitor.txnStore(operand, indexId, key, value);
visitor.txnCommit(operand, parentTxnId);
break;
case OP_TXN_DELETE:
indexId = in.readLongLE();
key = in.readBytes();
if (!verifyTerminator(in)) {
return;
}
visitor.txnStore(operand, indexId, key, null);
break;
case OP_TXN_DELETE_COMMIT:
indexId = in.readLongLE();
key = in.readBytes();
if (!verifyTerminator(in)) {
return;
}
visitor.txnStore(operand, indexId, key, null);
visitor.txnCommit(operand, 0);
break;
case OP_TXN_DELETE_COMMIT_CHILD:
parentTxnId = in.readLongLE();
indexId = in.readLongLE();
key = in.readBytes();
if (!verifyTerminator(in)) {
return;
}
visitor.txnStore(operand, indexId, key, null);
visitor.txnCommit(operand, parentTxnId);
break;
}
}
}
/**
* If false is returned, assume rest of log file is corrupt.
*/
private boolean verifyTerminator(DataIn in) throws IOException {
try {
return in.readIntLE() == nextTermRnd();
} catch (EOFException e) {
return false;
}
}
}
| src/main/java/org/cojen/tupl/RedoLog.java | /*
* Copyright 2011-2012 Brian S O'Neill
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cojen.tupl;
import java.io.EOFException;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.Random;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.FileChannel;
import java.security.GeneralSecurityException;
/**
*
*
* @author Brian S O'Neill
*/
final class RedoLog extends CauseCloseable implements Checkpointer.Shutdown {
private static final long MAGIC_NUMBER = 431399725605778814L;
private static final int ENCODING_VERSION = 20120801;
private static final byte
/** timestamp: long */
OP_TIMESTAMP = 1,
/** timestamp: long */
OP_SHUTDOWN = 2,
/** timestamp: long */
OP_CLOSE = 3,
/** timestamp: long */
OP_END_FILE = 4,
/** txnId: long */
//OP_TXN_BEGIN = 5,
/** txnId: long, parentTxnId: long */
//OP_TXN_BEGIN_CHILD = 6,
/** txnId: long */
//OP_TXN_CONTINUE = 7,
/** txnId: long, parentTxnId: long */
//OP_TXN_CONTINUE_CHILD = 8,
/** txnId: long */
OP_TXN_ROLLBACK = 9,
/** txnId: long, parentTxnId: long */
OP_TXN_ROLLBACK_CHILD = 10,
/** txnId: long */
OP_TXN_COMMIT = 11,
/** txnId: long, parentTxnId: long */
OP_TXN_COMMIT_CHILD = 12,
/** indexId: long, keyLength: varInt, key: bytes, valueLength: varInt, value: bytes */
OP_STORE = 16,
/** indexId: long, keyLength: varInt, key: bytes */
OP_DELETE = 17,
/** indexId: long */
//OP_CLEAR = 18,
/** txnId: long, indexId: long, keyLength: varInt, key: bytes,
valueLength: varInt, value: bytes */
OP_TXN_STORE = 19,
/** txnId: long, indexId: long, keyLength: varInt, key: bytes,
valueLength: varInt, value: bytes */
OP_TXN_STORE_COMMIT = 20,
/** txnId: long, parentTxnId: long, indexId: long, keyLength: varInt, key: bytes,
valueLength: varInt, value: bytes */
OP_TXN_STORE_COMMIT_CHILD = 21,
/** txnId: long, indexId: long, keyLength: varInt, key: bytes */
OP_TXN_DELETE = 22,
/** txnId: long, indexId: long, keyLength: varInt, key: bytes */
OP_TXN_DELETE_COMMIT = 23,
/** txnId: long, parentTxnId: long, indexId: long, keyLength: varInt, key: bytes */
OP_TXN_DELETE_COMMIT_CHILD = 24;
/** txnId: long, indexId: long */
//OP_TXN_CLEAR = 25,
/** txnId: long, indexId: long */
//OP_TXN_CLEAR_COMMIT = 26,
/** txnId: long, parentTxnId: long, indexId: long */
//OP_TXN_CLEAR_COMMIT_CHILD = 27,
/** length: varInt, data: bytes */
//OP_CUSTOM = (byte) 128,
/** txnId: long, length: varInt, data: bytes */
//OP_TXN_CUSTOM = (byte) 129;
private static int randomInt() {
Random rnd = new Random();
int x;
// Cannot return zero, since it breaks Xorshift RNG.
while ((x = rnd.nextInt()) == 0);
return x;
}
private final Crypto mCrypto;
private final File mBaseFile;
private final byte[] mBuffer;
private int mBufferPos;
private final boolean mReplayMode;
private long mLogId;
private OutputStream mOut;
private volatile FileChannel mChannel;
private boolean mAlwaysFlush;
private int mTermRndSeed;
private volatile Throwable mCause;
/**
     * @param logId first log id to open
*/
RedoLog(Crypto crypto, File baseFile, long logId, boolean replay) throws IOException {
mCrypto = crypto;
mBaseFile = baseFile;
mBuffer = new byte[4096];
mReplayMode = replay;
synchronized (this) {
mLogId = logId;
if (!replay) {
openNewFile(logId);
}
}
}
synchronized long logId() {
return mLogId;
}
/**
* @param scanned files scanned in previous replay
* @return all the files which were replayed
*/
synchronized Set<File> replay(RedoLogVisitor visitor, Set<File> scanned,
EventListener listener, EventType type, String message)
throws IOException
{
if (!mReplayMode || mBaseFile == null) {
throw new IllegalStateException();
}
try {
Set<File> files = new LinkedHashSet<File>(2);
while (true) {
File file = fileFor(mBaseFile, mLogId);
if (scanned != null && !scanned.contains(file)) {
break;
}
InputStream in;
try {
in = new FileInputStream(file);
} catch (FileNotFoundException e) {
break;
}
try {
if (mCrypto != null) {
try {
in = mCrypto.newDecryptingStream(mLogId, in);
} catch (IOException e) {
throw e;
} catch (Exception e) {
throw new DatabaseException(e);
}
}
if (listener != null) {
listener.notify(type, message, mLogId);
}
files.add(file);
replay(new DataIn(in), visitor);
} catch (EOFException e) {
// End of log didn't get completely flushed.
} finally {
Utils.closeQuietly(null, in);
}
mLogId++;
}
return files;
} catch (IOException e) {
throw Utils.rethrow(e, mCause);
}
}
static void deleteOldFile(File baseFile, long logId) {
fileFor(baseFile, logId).delete();
}
void deleteOldFile(long logId) {
deleteOldFile(mBaseFile, logId);
}
/**
* @return old log file id, which is one less than new one
*/
long openNewFile() throws IOException {
if (mReplayMode) {
throw new IllegalStateException();
}
final long oldLogId;
synchronized (this) {
oldLogId = mLogId;
}
openNewFile(oldLogId + 1);
return oldLogId;
}
private void openNewFile(long logId) throws IOException {
final File file = fileFor(mBaseFile, logId);
if (file.exists()) {
throw new FileNotFoundException("Log file already exists: " + file.getPath());
}
final OutputStream out;
final FileChannel channel;
final int termRndSeed;
{
FileOutputStream fout = new FileOutputStream(file);
channel = fout.getChannel();
if (mCrypto == null) {
out = fout;
} else {
try {
out = mCrypto.newEncryptingStream(logId, fout);
} catch (GeneralSecurityException e) {
throw new DatabaseException(e);
}
}
byte[] buf = new byte[8 + 4 + 8 + 4];
int offset = 0;
Utils.writeLongLE(buf, offset, MAGIC_NUMBER); offset += 8;
Utils.writeIntLE(buf, offset, ENCODING_VERSION); offset += 4;
Utils.writeLongLE(buf, offset, logId); offset += 8;
Utils.writeIntLE(buf, offset, termRndSeed = randomInt()); offset += 4;
if (offset != buf.length) {
throw new AssertionError();
}
try {
out.write(buf);
} catch (IOException e) {
Utils.closeQuietly(null, out);
file.delete();
throw e;
}
}
final OutputStream oldOut;
final FileChannel oldChannel;
synchronized (this) {
oldOut = mOut;
oldChannel = mChannel;
if (oldOut != null) {
writeOp(OP_END_FILE, System.currentTimeMillis());
writeTerminator();
doFlush();
}
mOut = out;
mChannel = channel;
mTermRndSeed = termRndSeed;
mLogId = logId;
timestamp();
}
try {
if (oldChannel != null) {
oldChannel.force(true);
}
} catch (IOException e) {
// Ignore.
}
Utils.closeQuietly(null, oldOut);
}
/**
* @return null if non-durable
*/
private static File fileFor(File base, long logId) {
return base == null ? null : new File(base.getPath() + ".redo." + logId);
}
public long size() throws IOException {
FileChannel channel = mChannel;
return channel == null ? 0 : channel.size();
}
public synchronized void flush() throws IOException {
doFlush();
}
public void sync() throws IOException {
flush();
force(false);
}
private void force(boolean metadata) throws IOException {
FileChannel channel = mChannel;
if (channel != null) {
try {
channel.force(metadata);
} catch (ClosedChannelException e) {
} catch (IOException e) {
throw Utils.rethrow(e, mCause);
}
}
}
@Override
public synchronized void close() throws IOException {
close(null);
}
@Override
public synchronized void close(Throwable cause) throws IOException {
if (cause != null) {
mCause = cause;
}
shutdown(OP_CLOSE);
}
@Override
public void shutdown() {
try {
shutdown(OP_SHUTDOWN);
} catch (IOException e) {
// Ignore.
}
}
void shutdown(byte op) throws IOException {
synchronized (this) {
mAlwaysFlush = true;
if (mChannel == null || !mChannel.isOpen()) {
return;
}
writeOp(op, System.currentTimeMillis());
writeTerminator();
doFlush();
if (op == OP_CLOSE) {
mChannel.force(true);
mChannel.close();
return;
}
}
force(true);
}
public void store(long indexId, byte[] key, byte[] value, DurabilityMode mode)
throws IOException
{
if (key == null) {
throw new NullPointerException("Key is null");
}
boolean sync;
synchronized (this) {
if (value == null) {
writeOp(OP_DELETE, indexId);
writeUnsignedVarInt(key.length);
writeBytes(key);
} else {
writeOp(OP_STORE, indexId);
writeUnsignedVarInt(key.length);
writeBytes(key);
writeUnsignedVarInt(value.length);
writeBytes(value);
}
writeTerminator();
sync = conditionalFlush(mode);
}
if (sync) {
force(false);
}
}
public synchronized void txnRollback(long txnId, long parentTxnId) throws IOException {
if (parentTxnId == 0) {
writeOp(OP_TXN_ROLLBACK, txnId);
} else {
writeOp(OP_TXN_ROLLBACK_CHILD, txnId);
writeLongLE(parentTxnId);
}
writeTerminator();
}
/**
* @return true if caller should call txnCommitSync
*/
public synchronized boolean txnCommitFull(long txnId, DurabilityMode mode) throws IOException {
writeOp(OP_TXN_COMMIT, txnId);
writeTerminator();
return conditionalFlush(mode);
}
/**
* Called after txnCommitFull.
*/
public void txnCommitSync() throws IOException {
force(false);
}
public synchronized void txnCommitScope(long txnId, long parentTxnId) throws IOException {
writeOp(OP_TXN_COMMIT_CHILD, txnId);
writeLongLE(parentTxnId);
writeTerminator();
}
public synchronized void txnStore(long txnId, long indexId, byte[] key, byte[] value)
throws IOException
{
if (key == null) {
throw new NullPointerException("Key is null");
}
if (value == null) {
writeOp(OP_TXN_DELETE, txnId);
writeLongLE(indexId);
writeUnsignedVarInt(key.length);
writeBytes(key);
} else {
writeOp(OP_TXN_STORE, txnId);
writeLongLE(indexId);
writeUnsignedVarInt(key.length);
writeBytes(key);
writeUnsignedVarInt(value.length);
writeBytes(value);
}
writeTerminator();
}
/*
public synchronized void txnStoreCommit(long txnId, long parentTxnId,
long indexId, byte[] key, byte[] value)
throws IOException
{
if (key == null) {
throw new NullPointerException("Key is null");
}
if (value == null) {
if (parentTxnId == 0) {
writeOp(OP_TXN_DELETE_COMMIT, txnId);
} else {
writeOp(OP_TXN_DELETE_COMMIT_CHILD, txnId);
writeLongLE(parentTxnId);
}
writeLongLE(indexId);
writeUnsignedVarInt(key.length);
writeBytes(key);
} else {
if (parentTxnId == 0) {
writeOp(OP_TXN_STORE_COMMIT, txnId);
} else {
writeOp(OP_TXN_STORE_COMMIT_CHILD, txnId);
writeLongLE(parentTxnId);
}
writeLongLE(indexId);
writeUnsignedVarInt(key.length);
writeBytes(key);
writeUnsignedVarInt(value.length);
writeBytes(value);
}
writeTerminator();
}
*/
synchronized void timestamp() throws IOException {
writeOp(OP_TIMESTAMP, System.currentTimeMillis());
writeTerminator();
}
// Caller must be synchronized.
private void writeIntLE(int v) throws IOException {
byte[] buffer = mBuffer;
int pos = mBufferPos;
if (pos > buffer.length - 4) {
doFlush(buffer, pos);
pos = 0;
}
Utils.writeIntLE(buffer, pos, v);
mBufferPos = pos + 4;
}
// Caller must be synchronized.
private void writeLongLE(long v) throws IOException {
byte[] buffer = mBuffer;
int pos = mBufferPos;
if (pos > buffer.length - 8) {
doFlush(buffer, pos);
pos = 0;
}
Utils.writeLongLE(buffer, pos, v);
mBufferPos = pos + 8;
}
// Caller must be synchronized.
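// Writes a 1-byte op code followed by its 8-byte little-endian operand (9 bytes total).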
private void writeOp(byte op, long operand) throws IOException {
byte[] buffer = mBuffer;
int pos = mBufferPos;
if (pos >= buffer.length - 9) {
doFlush(buffer, pos);
pos = 0;
}
buffer[pos] = op;
Utils.writeLongLE(buffer, pos + 1, operand);
mBufferPos = pos + 9;
}
// Caller must be synchronized.
private void writeTerminator() throws IOException {
writeIntLE(nextTermRnd());
}
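// Each record is terminated by the next value of a per-file xorshift sequence
// seeded in the file header; replay advances the same sequence, so a mismatch
// flags the record (and everything after it) as torn or corrupt.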
// Caller must be synchronized (replay is exempt).
private int nextTermRnd() throws IOException {
// Xorshift RNG by George Marsaglia.
int x = mTermRndSeed;
x ^= x << 13;
x ^= x >>> 17;
x ^= x << 5;
mTermRndSeed = x;
return x;
}
// Caller must be synchronized.
private void writeUnsignedVarInt(int v) throws IOException {
byte[] buffer = mBuffer;
int pos = mBufferPos;
if (pos > buffer.length - 5) {
doFlush(buffer, pos);
pos = 0;
}
mBufferPos = Utils.writeUnsignedVarInt(buffer, pos, v);
}
// Caller must be synchronized.
private void writeBytes(byte[] bytes) throws IOException {
writeBytes(bytes, 0, bytes.length);
}
// Caller must be synchronized.
private void writeBytes(byte[] bytes, int offset, int length) throws IOException {
if (length == 0) {
return;
}
byte[] buffer = mBuffer;
int pos = mBufferPos;
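// Copy in buffer-sized chunks, flushing whenever the remaining payload does not fit.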
while (true) {
if (pos <= buffer.length - length) {
System.arraycopy(bytes, offset, buffer, pos, length);
mBufferPos = pos + length;
return;
}
int remaining = buffer.length - pos;
System.arraycopy(bytes, offset, buffer, pos, remaining);
doFlush(buffer, buffer.length);
pos = 0;
offset += remaining;
length -= remaining;
}
}
// Caller must be synchronized. Returns true if caller should sync.
private boolean conditionalFlush(DurabilityMode mode) throws IOException {
switch (mode) {
default:
return false;
case NO_FLUSH:
if (mAlwaysFlush) {
doFlush();
}
return false;
case SYNC:
doFlush();
return true;
case NO_SYNC:
doFlush();
return false;
}
}
// Caller must be synchronized.
private void doFlush() throws IOException {
doFlush(mBuffer, mBufferPos);
}
// Caller must be synchronized.
private void doFlush(byte[] buffer, int pos) throws IOException {
try {
mOut.write(buffer, 0, pos);
mBufferPos = 0;
} catch (IOException e) {
throw Utils.rethrow(e, mCause);
}
}
private void replay(DataIn in, RedoLogVisitor visitor) throws IOException {
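// Header layout, matching what the writer emits when a log file is created:
// magic (8 bytes), encoding version (4), log id (8), terminator seed (4),
// all little-endian.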
long magic = in.readLongLE();
if (magic != MAGIC_NUMBER) {
if (magic == 0) {
// Assume file was flushed improperly and discard it.
return;
}
throw new DatabaseException("Incorrect magic number in redo log file");
}
int version = in.readIntLE();
if (version != ENCODING_VERSION) {
throw new DatabaseException("Unsupported redo log encoding version: " + version);
}
long id = in.readLongLE();
if (id != mLogId) {
throw new DatabaseException
("Expected redo log identifier of " + mLogId + ", but actual is: " + id);
}
mTermRndSeed = in.readIntLE();
int op;
while ((op = in.read()) >= 0) {
long operand = in.readLongLE();
switch (op &= 0xff) {
default:
throw new DatabaseException("Unknown redo log operation: " + op);
case OP_TIMESTAMP:
if (!verifyTerminator(in)) {
return;
}
visitor.timestamp(operand);
break;
case OP_SHUTDOWN:
if (!verifyTerminator(in)) {
return;
}
visitor.shutdown(operand);
break;
case OP_CLOSE:
if (!verifyTerminator(in)) {
return;
}
visitor.close(operand);
break;
case OP_END_FILE:
if (!verifyTerminator(in)) {
return;
}
visitor.endFile(operand);
break;
case OP_TXN_ROLLBACK:
if (!verifyTerminator(in)) {
return;
}
visitor.txnRollback(operand, 0);
break;
case OP_TXN_ROLLBACK_CHILD:
long parentTxnId = in.readLongLE();
if (!verifyTerminator(in)) {
return;
}
visitor.txnRollback(operand, parentTxnId);
break;
case OP_TXN_COMMIT:
if (!verifyTerminator(in)) {
return;
}
visitor.txnCommit(operand, 0);
break;
case OP_TXN_COMMIT_CHILD:
parentTxnId = in.readLongLE();
if (!verifyTerminator(in)) {
return;
}
visitor.txnCommit(operand, parentTxnId);
break;
case OP_STORE:
byte[] key = in.readBytes();
byte[] value = in.readBytes();
if (!verifyTerminator(in)) {
return;
}
visitor.store(operand, key, value);
break;
case OP_DELETE:
key = in.readBytes();
if (!verifyTerminator(in)) {
return;
}
visitor.store(operand, key, null);
break;
case OP_TXN_STORE:
long indexId = in.readLongLE();
key = in.readBytes();
value = in.readBytes();
if (!verifyTerminator(in)) {
return;
}
visitor.txnStore(operand, indexId, key, value);
break;
case OP_TXN_STORE_COMMIT:
indexId = in.readLongLE();
key = in.readBytes();
value = in.readBytes();
if (!verifyTerminator(in)) {
return;
}
visitor.txnStore(operand, indexId, key, value);
visitor.txnCommit(operand, 0);
break;
case OP_TXN_STORE_COMMIT_CHILD:
parentTxnId = in.readLongLE();
indexId = in.readLongLE();
key = in.readBytes();
value = in.readBytes();
if (!verifyTerminator(in)) {
return;
}
visitor.txnStore(operand, indexId, key, value);
visitor.txnCommit(operand, parentTxnId);
break;
case OP_TXN_DELETE:
indexId = in.readLongLE();
key = in.readBytes();
if (!verifyTerminator(in)) {
return;
}
visitor.txnStore(operand, indexId, key, null);
break;
case OP_TXN_DELETE_COMMIT:
indexId = in.readLongLE();
key = in.readBytes();
if (!verifyTerminator(in)) {
return;
}
visitor.txnStore(operand, indexId, key, null);
visitor.txnCommit(operand, 0);
break;
case OP_TXN_DELETE_COMMIT_CHILD:
parentTxnId = in.readLongLE();
indexId = in.readLongLE();
key = in.readBytes();
if (!verifyTerminator(in)) {
return;
}
visitor.txnStore(operand, indexId, key, null);
visitor.txnCommit(operand, parentTxnId);
break;
}
}
}
/**
* If false is returned, assume rest of log file is corrupt.
*/
private boolean verifyTerminator(DataIn in) throws IOException {
try {
return in.readIntLE() == nextTermRnd();
} catch (EOFException e) {
return false;
}
}
}
| Handle unflushed redo log.
| src/main/java/org/cojen/tupl/RedoLog.java | Handle unflushed redo log. | <ide><path>rc/main/java/org/cojen/tupl/RedoLog.java
<ide> default:
<ide> throw new DatabaseException("Unknown redo log operation: " + op);
<ide>
<add> case 0:
<add> // Assume redo log did not flush completely.
<add> return;
<add>
<ide> case OP_TIMESTAMP:
<ide> if (!verifyTerminator(in)) {
<ide> return; |
|
Java | apache-2.0 | ae22d415db87d20a62f6b109a95771c3e0bb6986 | 0 | mtransitapps/ca-victoria-regional-transit-system-bus-parser,mtransitapps/ca-victoria-regional-transit-system-bus-parser | package org.mtransit.parser.ca_victoria_regional_transit_system_bus;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import org.mtransit.commons.StrategicMappingCommons;
import org.mtransit.parser.CleanUtils;
import org.mtransit.parser.DefaultAgencyTools;
import org.mtransit.parser.Pair;
import org.mtransit.parser.SplitUtils;
import org.mtransit.parser.SplitUtils.RouteTripSpec;
import org.mtransit.parser.Utils;
import org.mtransit.parser.gtfs.data.GCalendar;
import org.mtransit.parser.gtfs.data.GCalendarDate;
import org.mtransit.parser.gtfs.data.GRoute;
import org.mtransit.parser.gtfs.data.GSpec;
import org.mtransit.parser.gtfs.data.GStop;
import org.mtransit.parser.gtfs.data.GTrip;
import org.mtransit.parser.gtfs.data.GTripStop;
import org.mtransit.parser.mt.data.MAgency;
import org.mtransit.parser.mt.data.MRoute;
import org.mtransit.parser.mt.data.MTrip;
import org.mtransit.parser.mt.data.MTripStop;
// https://www.bctransit.com/open-data
// https://victoria.mapstrat.com/current/google_transit.zip
public class VictoriaRegionalTransitSystemBusAgencyTools extends DefaultAgencyTools {
public static void main(String[] args) {
if (args == null || args.length == 0) {
args = new String[3];
args[0] = "input/gtfs.zip";
args[1] = "../../mtransitapps/ca-victoria-regional-transit-system-bus-android/res/raw/";
args[2] = ""; // files-prefix
}
new VictoriaRegionalTransitSystemBusAgencyTools().start(args);
}
private HashSet<String> serviceIds;
@Override
public void start(String[] args) {
System.out.printf("\nGenerating Victoria Regional TS bus data...");
long start = System.currentTimeMillis();
this.serviceIds = extractUsefulServiceIds(args, this, true);
super.start(args);
System.out.printf("\nGenerating Victoria Regional TS bus data... DONE in %s.\n", Utils.getPrettyDuration(System.currentTimeMillis() - start));
}
@Override
public boolean excludingAll() {
return this.serviceIds != null && this.serviceIds.isEmpty();
}
private static final String INCLUDE_ONLY_SERVICE_ID_STARTS_WITH = null;
private static final String INCLUDE_ONLY_SERVICE_ID_STARTS_WITH2 = null;
@Override
public boolean excludeCalendar(GCalendar gCalendar) {
if (INCLUDE_ONLY_SERVICE_ID_STARTS_WITH != null && !gCalendar.getServiceId().startsWith(INCLUDE_ONLY_SERVICE_ID_STARTS_WITH)
&& INCLUDE_ONLY_SERVICE_ID_STARTS_WITH2 != null && !gCalendar.getServiceId().startsWith(INCLUDE_ONLY_SERVICE_ID_STARTS_WITH2)) {
return true;
}
if (this.serviceIds != null) {
return excludeUselessCalendar(gCalendar, this.serviceIds);
}
return super.excludeCalendar(gCalendar);
}
@Override
public boolean excludeCalendarDate(GCalendarDate gCalendarDates) {
if (INCLUDE_ONLY_SERVICE_ID_STARTS_WITH != null && !gCalendarDates.getServiceId().startsWith(INCLUDE_ONLY_SERVICE_ID_STARTS_WITH)
&& INCLUDE_ONLY_SERVICE_ID_STARTS_WITH2 != null && !gCalendarDates.getServiceId().startsWith(INCLUDE_ONLY_SERVICE_ID_STARTS_WITH2)) {
return true;
}
if (this.serviceIds != null) {
return excludeUselessCalendarDate(gCalendarDates, this.serviceIds);
}
return super.excludeCalendarDate(gCalendarDates);
}
private static final String INCLUDE_AGENCY_ID = "1"; // Victoria Regional Transit System only
@Override
public boolean excludeRoute(GRoute gRoute) {
if (!INCLUDE_AGENCY_ID.equals(gRoute.getAgencyId())) {
return true;
}
return super.excludeRoute(gRoute);
}
@Override
public boolean excludeTrip(GTrip gTrip) {
if (INCLUDE_ONLY_SERVICE_ID_STARTS_WITH != null && !gTrip.getServiceId().startsWith(INCLUDE_ONLY_SERVICE_ID_STARTS_WITH)
&& INCLUDE_ONLY_SERVICE_ID_STARTS_WITH2 != null && !gTrip.getServiceId().startsWith(INCLUDE_ONLY_SERVICE_ID_STARTS_WITH2)) {
return true;
}
if (this.serviceIds != null) {
return excludeUselessTrip(gTrip, this.serviceIds);
}
return super.excludeTrip(gTrip);
}
@Override
public Integer getAgencyRouteType() {
return MAgency.ROUTE_TYPE_BUS;
}
@Override
public long getRouteId(GRoute gRoute) {
if ("ARB".equalsIgnoreCase(gRoute.getRouteShortName())) {
return 999L;
}
return Long.parseLong(gRoute.getRouteShortName()); // use route short name as route ID
}
@Override
public String getRouteLongName(GRoute gRoute) {
String routeLongName = gRoute.getRouteLongName();
routeLongName = CleanUtils.cleanNumbers(routeLongName);
routeLongName = CleanUtils.cleanStreetTypes(routeLongName);
return CleanUtils.cleanLabel(routeLongName);
}
private static final String AGENCY_COLOR_GREEN = "34B233";// GREEN (from PDF Corporate Graphic Standards)
private static final String AGENCY_COLOR_BLUE = "002C77"; // BLUE (from PDF Corporate Graphic Standards)
private static final String AGENCY_COLOR = AGENCY_COLOR_GREEN;
@Override
public String getAgencyColor() {
return AGENCY_COLOR;
}
@Override
public String getRouteColor(GRoute gRoute) {
if (StringUtils.isEmpty(gRoute.getRouteColor())) {
return AGENCY_COLOR_BLUE;
}
return super.getRouteColor(gRoute);
}
private static final String AND = " & ";
private static final String EXCH = "Exch";
private static final String DOWNTOWN = "Downtown";
private static final String OAK_BAY = "Oak Bay";
private static final String SOUTH_OAK_BAY = "South " + OAK_BAY;
private static final String ROYAL_OAK = "Royal Oak";
private static final String ROYAL_OAK_EXCH = ROYAL_OAK + " " + EXCH;
private static final String CAMOSUN = "Camosun";
private static final String JAMES_BAY = "James Bay";
private static final String DOCKYARD = "Dockyard";
private static final String ADMIRALS_WALK = "Admirals Walk";
private static final String HILLSIDE = "Hillside";
private static final String HILLSIDE_MALL = HILLSIDE + " Mall";
private static final String U_VIC = "UVic";
private static final String BRENTWOOD = "Brentwood";
private static final String SAANICHTON = "Saanichton";
private static final String SAANICHTON_EXCH = SAANICHTON + " " + EXCH;
private static final String SWARTZ_BAY = "Swartz Bay";
private static final String SWARTZ_BAY_FERRY = SWARTZ_BAY + " Ferry";
private static final String SOOKE = "Sooke";
private static final String LANGFORD = "Langford";
private static final String LANGFORD_EXCH = LANGFORD + " " + EXCH;
private static final String COLWOOD_EXCH = "Colwood " + EXCH;
private static final String HAPPY_VLY = "Happy Vly";
private static final String TILLICUM_MALL = "Tillicum Mall";
private static final String SPECTRUM_SCHOOL = "Spectrum School";
private static final String GORGE = "Gorge";
private static final String INTERURBAN = "Interurban";
private static final String MILE_HOUSE = "Mile House";
private static final String VERDIER = "Verdier";
private static final String SIDNEY = "Sidney";
private static final String BEAR_MOUNTAIN = "Bear Mtn";
private static final String MC_DONALD_PARK = "McDonald Pk";
private static final String MC_TAVISH = "McTavish";
private static final String MC_TAVISH_EXCH = MC_TAVISH + " " + EXCH;
private static final String VIC_GENERAL = "Vic General";
private static final String UPTOWN = "Uptown";
private static final String RICHMOND = "Richmond";
private static final String MC_KENZIE = "McKenzie";
private static final String DOUGLAS = "Douglas";
private static final String WILLOWS = "Willows";
private static final String WESTHILLS_EXCH = "Westhills Exch";
private static final String KEATING = "Keating";
private static final String SHORELINE_SCHOOL = "Shoreline Sch";
private static final String R_JUBILEE = "R. Jubilee";
private static final String VIC_WEST = "Vic West";
private static HashMap<Long, RouteTripSpec> ALL_ROUTE_TRIPS2;
static {
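// No route currently needs a manual trip split, so the map stays empty and
// the compare/split overrides below fall through to the default behavior.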
HashMap<Long, RouteTripSpec> map2 = new HashMap<Long, RouteTripSpec>();
ALL_ROUTE_TRIPS2 = map2;
}
@Override
public int compareEarly(long routeId, List<MTripStop> list1, List<MTripStop> list2, MTripStop ts1, MTripStop ts2, GStop ts1GStop, GStop ts2GStop) {
if (ALL_ROUTE_TRIPS2.containsKey(routeId)) {
return ALL_ROUTE_TRIPS2.get(routeId).compare(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop, this);
}
return super.compareEarly(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop);
}
@Override
public ArrayList<MTrip> splitTrip(MRoute mRoute, GTrip gTrip, GSpec gtfs) {
if (ALL_ROUTE_TRIPS2.containsKey(mRoute.getId())) {
return ALL_ROUTE_TRIPS2.get(mRoute.getId()).getAllTrips();
}
return super.splitTrip(mRoute, gTrip, gtfs);
}
@Override
public Pair<Long[], Integer[]> splitTripStop(MRoute mRoute, GTrip gTrip, GTripStop gTripStop, ArrayList<MTrip> splitTrips, GSpec routeGTFS) {
if (ALL_ROUTE_TRIPS2.containsKey(mRoute.getId())) {
return SplitUtils.splitTripStop(mRoute, gTrip, gTripStop, routeGTFS, ALL_ROUTE_TRIPS2.get(mRoute.getId()), this);
}
return super.splitTripStop(mRoute, gTrip, gTripStop, splitTrips, routeGTFS);
}
@Override
public void setTripHeadsign(MRoute mRoute, MTrip mTrip, GTrip gTrip, GSpec gtfs) {
if (ALL_ROUTE_TRIPS2.containsKey(mRoute.getId())) {
return; // split
}
String tripHeadsign = gTrip.getTripHeadsign();
tripHeadsign = Pattern.compile("(^" + mRoute.getShortName() + "( )?)", Pattern.CASE_INSENSITIVE).matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
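// Headsigns are matched verbatim against the strings found in the GTFS feed,
// including feed typos ("Finalyson", "Theits") and casing variants, and mapped
// to a compass or loop direction constant per GTFS direction id. Unmatched
// headsigns abort the export below so new feed values get noticed.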
if (mRoute.getId() == 1L) {
if (gTrip.getDirectionId() == 0) { // DOWNTOWN - WEST
if (Arrays.asList( //
"Downtown" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // SOUTH OAK BAY - EAST
if (Arrays.asList( //
"South Oak Bay via Richardson" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
return;
}
}
} else if (mRoute.getId() == 2L) {
if (gTrip.getDirectionId() == 0) { // JAMES BAY - WEST
if (Arrays.asList( //
"James Bay - Fisherman's Wharf" // <>
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // OAK BAY - EAST
if (Arrays.asList( //
"James Bay - Fisherman's Wharf", // <>
"Downtown", //
"Downtown Only", //
"South Oak Bay - Oak Bay Village", //
"Willows - Oak Bay Village" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
return;
}
}
} else if (mRoute.getId() == 3L) {
if (gTrip.getDirectionId() == 0) { // JAMES BAY - CLOCKWISE
if (Arrays.asList( //
"Downtown Only", //
"James Bay To 10 R. Jubilee", //
"James Bay - Linden to 10 R. Jubilee", //
"James Bay - Quimper To 10 R. Jubilee" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.CLOCKWISE);
return;
}
} else if (gTrip.getDirectionId() == 1) { // ROYAL JUBILEE - COUNTERCLOCKWISE
if (Arrays.asList( //
"Royal Jubilee - Cook St Village", //
"Royal Jubilee - Cook St Vlg/Quimper" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.COUNTERCLOCKWISE);
return;
}
}
} else if (mRoute.getId() == 4L) {
if (gTrip.getDirectionId() == 0) { // DOWNTOWN - WEST
if (Arrays.asList( //
"Downtown", //
"To Gorge & Douglas" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
if (Arrays.asList( //
"UVic Via Hillside" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
return;
}
}
} else if (mRoute.getId() == 6L) {
if (gTrip.getDirectionId() == 0) { // ROYAL OAK - NORTH
if (Arrays.asList( //
"Royal Oak Exch Via Royal Oak Mall", //
"A Royal Oak Exch Via Emily Carr", //
"B Royal Oak Exch Via Chatterton" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - SOUTH
if (Arrays.asList( //
"Downtown", //
"B Downtown Via Chatterton", //
"A Downtown Via Emily Carr" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mRoute.getId() == 7L) {
if (gTrip.getDirectionId() == 0) { // DOWNTOWN - CLOCKWISE
if (Arrays.asList( //
"Downtown Only", //
"N Downtown Only", //
"Downtown - To 21 Interurban", //
"N Downtown - To 21 Interurban", //
"Downtown To 21 Interurban" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.CLOCKWISE);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - COUNTERCLOCKWISE
if (Arrays.asList( //
"UVic Via Fairfield", //
"N UVic - Cook St Village" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.COUNTERCLOCKWISE);
return;
}
}
} else if (mTrip.getRouteId() == 8L) {
if (gTrip.getDirectionId() == 0) { // INTERURBAN - WEST
if (Arrays.asList( //
"Tillicum Mall Via Finalyson", //
"Interurban Via Finlayson" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // OAK BAY - EAST
if (Arrays.asList( //
"To Richmond & Oak Bay Ave Only", //
"To Douglas Only - Mayfair Mall", //
"Oak Bay Via Finlayson", //
"Oak Bay Via Finalyson" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 9L) {
if (gTrip.getDirectionId() == 0) { // ROYAL OAK - WEST
if (Arrays.asList( //
"Royal Oak Exch - Hillside/Gorge" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
if (Arrays.asList( //
"UVic - Gorge/Hillside" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 10L) {
if (gTrip.getDirectionId() == 0) { // ROYAL JUBILEE - CLOCKWISE
if (Arrays.asList( //
"Royal Jubilee Via Vic West" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.CLOCKWISE);
return;
}
} else if (gTrip.getDirectionId() == 1) { // JAMES BAY - COUNTERCLOCKWISE
if (Arrays.asList( //
"James Bay - To 3 R. Jubilee", //
"To Vic West Only" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.COUNTERCLOCKWISE);
return;
}
}
} else if (mTrip.getRouteId() == 11L) {
if (gTrip.getDirectionId() == 0) { // TILLICUM MALL - WEST
if (Arrays.asList( //
"Tillicum Mall Via Gorge" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
if (Arrays.asList( //
"Downtown", //
"Downtown Only", //
"UVic Via Uplands" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 12L) {
if (gTrip.getDirectionId() == 0) { // UNIVERSITY HGTS - WEST
if (Arrays.asList( //
"University Hgts Via Kenmore" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
if (Arrays.asList( //
"UVic Via Kenmore" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 13L) {
if (gTrip.getDirectionId() == 0) { // UVIC - WEST
if (Arrays.asList( //
"UVic" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // TEN MILE POINT - EAST
if (Arrays.asList( //
"Ten Mile Point" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 14L) {
if (gTrip.getDirectionId() == 0) { // VIC GENERAL - WEST
if (Arrays.asList( //
"Downtown", //
"Vic General Via Craigflower" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
if (Arrays.asList( //
"Downtown Only", //
"Downtown", //
"UVic", //
"UVic Via Richmond" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 15L) {
if (gTrip.getDirectionId() == 0) { // ESQUIMALT - WEST
if (Arrays.asList( //
"Esquimalt", //
"Esquimalt - Fort/Yates Exp" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
if (Arrays.asList( //
"Downtown", //
"UVic - Foul Bay Exp" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 16L) {
if (gTrip.getDirectionId() == 0) { // UPTOWN - WEST
if (Arrays.asList( //
"Uptown - McKenzie Exp" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
if (Arrays.asList( //
"UVic - McKenzie Exp" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 17L) {
if (gTrip.getDirectionId() == 0) { // Downtown - WEST
if (Arrays.asList( //
"Downtown Via Quadra" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
if (Arrays.asList( //
"UVic Via Cedar Hill Sch" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 21L) {
if (gTrip.getDirectionId() == 0) { // INTERURBAN - CLOCKWISE
if (Arrays.asList( //
"Interurban - VI Tech Park", //
"Interurban - Camosun Only", //
"Interurban - Viaduct Loop", //
"N Camosun Via Burnside" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.CLOCKWISE);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - COUNTERCLOCKWISE
if (Arrays.asList( //
"Downtown To 7 UVic" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.COUNTERCLOCKWISE);
return;
}
}
} else if (mTrip.getRouteId() == 22L) {
if (gTrip.getDirectionId() == 0) { // VIC GENERAL - NORTH
if (Arrays.asList( //
"Downtown Only", //
"Downtown", //
"Vic General - Watkiss Way Via Burnside", //
"A Vic General - Watkiss Wy Via S. Vale", //
"A Vic General Via Straw Vale", //
"A Vic General Via S. Vale", //
"To Spectrum School", //
"Vic General Via Burnside" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // HILLSIDE MALL - SOUTH
if (Arrays.asList( //
"Downtown Only", //
"Downtown", //
"A Hillside Mall Via Straw Vale", //
"Hillside Mall Via Fernwood" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 24L) {
if (gTrip.getDirectionId() == 0) { // Admirals Walk - WEST
if (Arrays.asList( //
"Downtown Only", //
"Downtown", //
"Admirals Walk Via Parklands/Colville", //
"Admirals Walk Via Colville" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // Cedar Hill - EAST
if (Arrays.asList( //
"Cedar Hill", //
"Cedar Hill Via Parklands" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 25L) {
if (gTrip.getDirectionId() == 0) { // Admirals Walk - WEST
if (Arrays.asList( //
"Shoreline Sch Via Munro", //
"Admirals Walk Via Munro" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // Maplewood - EAST
if (Arrays.asList( //
"Maplewood" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 26L) {
if (gTrip.getDirectionId() == 0) { // DOCKYARD - WEST
if (Arrays.asList( //
"To Uptown Only", //
"Dockyard Via McKenzie" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
if (Arrays.asList( //
"To Uptown Only", //
"UVic Via McKenzie" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 27L) {
if (gTrip.getDirectionId() == 0) { // GORDON HEAD - NORTH
if (Arrays.asList( //
"Gordon Head Via Shelbourne" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - SOUTH
if (Arrays.asList( //
"X Express To Downtown", //
"To Hillside Only", //
"Downtown" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 28L) {
if (gTrip.getDirectionId() == 0) { // MAJESTIC - NORTH
if (Arrays.asList( //
"X Express To Majestic", //
"Majestic Via Shelbourne" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - SOUTH
if (Arrays.asList( //
"To McKenzie Only", //
"To Hillside Only", //
"Downtown" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 30L) {
if (Arrays.asList( //
"Royal Oak Exch Via Carey", //
"Royal Oak Exch To 75 Saanichton" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
return;
}
if (Arrays.asList( //
"Downtown" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
return;
}
} else if (mTrip.getRouteId() == 31L) {
if (gTrip.getDirectionId() == 0) { // ROYAL OAK - NORTH
if (Arrays.asList( //
"Royal Oak Exch To 75 Saanichton", //
"Royal Oak Exch Via Glanford" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - SOUTH
if (Arrays.asList( //
"To Gorge Only", //
"To Uptown Only", //
"Downtown" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 32L) {
if (gTrip.getDirectionId() == 0) { // Cordova Bay - NORTH
if (Arrays.asList( //
"Cordova Bay" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // ROYAL OAK - SOUTH
if (Arrays.asList( //
"Downtown", //
"Royal Oak Exch" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 35L) {
// TODO split? NORTH/SOUTH
if (gTrip.getDirectionId() == 0) { // Ridge - CLOCKWISE
if (Arrays.asList( //
"Ridge" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.CLOCKWISE);
return;
}
}
} else if (mTrip.getRouteId() == 39L) {
if (gTrip.getDirectionId() == 0) { // WESTHILLS - WEST
if (Arrays.asList( //
"Royal Oak Exch", //
"Interurban", //
"Westhills Exch" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
if (Arrays.asList( //
"UVic Via Royal Oak" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 43L) {
if (gTrip.getDirectionId() == 0) { // ROYAL ROADS - CLOCKWISE
if (Arrays.asList( //
"Belmont Park - Royal Roads" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.CLOCKWISE);
return;
}
}
} else if (mTrip.getRouteId() == 46L) {
if (gTrip.getDirectionId() == 0) { // WESTHILLS - WEST
if (Arrays.asList( //
"Westhills Exch" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOCKYARD - EAST
if (Arrays.asList( //
"Dockyard" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 47L) {
if (gTrip.getDirectionId() == 0) { // GOLDSTREAM MEADOWS - WEST
if (Arrays.asList( //
"Goldstream Mdws Via Thetis Hgts" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - EAST
if (Arrays.asList( //
"Downtown" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 48L) {
if (gTrip.getDirectionId() == 0) { // HAPPY VALLEY - WEST
if (Arrays.asList( //
"Happy Valley via Colwood", //
"HAPPY VALLEY VIA COLWOOD" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - EAST
if (Arrays.asList( //
"Downtown" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 50L) {
if (gTrip.getDirectionId() == 0) { // LANGFORD - WEST
if (Arrays.asList( //
"Langford To 61 Sooke", //
"Langford" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - EAST
if (Arrays.asList( //
"Downtown" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 51L) {
if (gTrip.getDirectionId() == 0) { // LANGFORD - WEST
if (Arrays.asList( //
"Langford - McKenzie Exp" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
if (Arrays.asList( //
"UVic - McKenzie Exp" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 52L) {
if (gTrip.getDirectionId() == 0) { // BEAR MOUNTAIN - WEST
if (Arrays.asList( //
"Langford Exch Via Royal Bay", //
"Langford Exch Via Lagoon", //
"Langford Exch", //
"Bear Mountain - Lagoon/Royal Bay", //
"Bear Mountain Via Lagoon", //
"Bear Mountain" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // COLWOOD EXCHANGE - EAST
if (Arrays.asList( //
"Langford Exch", //
"Colwood Exch Via Royal Bay/Lagoon", //
"Colwood Exch Via Royal Bay", //
"Colwood Exch Via Lagoon", //
"Colwood Exch" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 53L) {
if (gTrip.getDirectionId() == 0) { // COLWOOD EXCHANGE - CLOCKWISE
if (Arrays.asList( //
"Colwood Exch Via Atkins - Thetis Lk", //
"Colwood Exch Via Atkins" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.CLOCKWISE);
return;
}
} else if (gTrip.getDirectionId() == 1) { // LANGFORD EXCHANGE - COUNTERCLOCKWISE
if (Arrays.asList( //
"Langford Exch Via Atkins - Theits Lk", //
"Langford Exch Via Atkins" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.COUNTERCLOCKWISE);
return;
}
}
} else if (mTrip.getRouteId() == 54L) {
if (gTrip.getDirectionId() == 0) { // LANGFORD EXCHANGE - CLOCKWISE
if (Arrays.asList( //
"Metchosin" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.CLOCKWISE);
return;
}
}
} else if (mTrip.getRouteId() == 55L) {
if (gTrip.getDirectionId() == 1) { // LANGFORD EXCHANGE - COUNTERCLOCKWISE
if (Arrays.asList( //
"Happy Valley To Colwood Exch", //
"Happy Valley" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.COUNTERCLOCKWISE);
return;
}
}
} else if (mTrip.getRouteId() == 56L) {
if (gTrip.getDirectionId() == 0) { // THETIS HEIGHTS - NORTH
if (Arrays.asList( //
"Thetis Heights Via Florence Lake" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // LANGFORD EXCHANGE - SOUTH
if (Arrays.asList( //
"Langford Exch" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 57L) {
if (gTrip.getDirectionId() == 0) { // THETIS HEIGHTS - NORTH
if (Arrays.asList( //
"Thetis Heights Via Millstream", //
"Theits Heights Via Millstream" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // LANGFORD EXCHANGE - SOUTH
if (Arrays.asList( //
"Langford Exch" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 58L) {
if (gTrip.getDirectionId() == 1) { // GOLDSTREAM MEADOWS - OUTBOUND
if (Arrays.asList( //
"Goldstream Mdws" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.OUTBOUND);
return;
}
}
} else if (mTrip.getRouteId() == 59L) {
if (gTrip.getDirectionId() == 1) { // LANGFORD EXCHANGE - COUNTERCLOCKWISE
if (Arrays.asList( //
"Triangle Mtn Via Royal Bay", //
"Triangle Mtn" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.COUNTERCLOCKWISE);
return;
}
}
} else if (mTrip.getRouteId() == 60L) {
if (gTrip.getDirectionId() == 0) { // LANGFORD EXCHANGE - CLOCKWISE
if (Arrays.asList( //
"Wishart Via Royal Bay", //
"Wishart" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.CLOCKWISE);
return;
}
}
} else if (mTrip.getRouteId() == 61L) {
if (gTrip.getDirectionId() == 0) { // SOOKE - WEST
if (Arrays.asList( //
"Sooke" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - EAST
if (Arrays.asList( //
"Langford - Jacklin/Station", //
"Langford Exch To 50 Downtown", //
"Downtown" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 63L) {
// TODO split?
if (gTrip.getDirectionId() == 0) { // OTTER POINT - WEST
if (Arrays.asList( //
"Otter Point" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
}
} else if (mTrip.getRouteId() == 64L) {
// TODO split
if (gTrip.getDirectionId() == 0) { // SOOKE - CLOCKWISE
if (Arrays.asList( //
"East Sooke To 17 Mile House", //
"East Sooke To Langford", //
"East Sooke To Sooke" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.CLOCKWISE);
return;
}
}
if (isGoodEnoughAccepted()) {
if (gTrip.getDirectionId() == 1) { // SOOKE - ????
if (Arrays.asList( //
"East Sooke" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.COUNTERCLOCKWISE);
return;
}
}
}
} else if (mTrip.getRouteId() == 65L) {
if (gTrip.getDirectionId() == 0) { // SOOKE - WEST
if (Arrays.asList( //
"Sooke Via Westhills" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - EAST
if (Arrays.asList( //
"Downtown Via Westhills" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 70L) {
if (gTrip.getDirectionId() == 0) { // SWARTZ BAY FERRY - NORTH
if (Arrays.asList( //
"Swartz Bay Ferry Via Hwy #17" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - SOUTH
if (Arrays.asList( //
"To Gorge Only Via Hwy #17", //
"Downtown Via Hwy #17" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 71L) {
if (gTrip.getDirectionId() == 0) { // SWARTZ BAY FERRY - NORTH
if (Arrays.asList( //
"Swartz Bay Ferry Via West Sidney" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - SOUTH
if (Arrays.asList( //
"Downtown" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 72L) {
if (gTrip.getDirectionId() == 0) { // SWARTZ BAY FERRY - NORTH
if (Arrays.asList( //
"McDonald Park Via Saanichton", //
"Swartz Bay Ferry Via Saanichton" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - SOUTH
if (Arrays.asList( //
"McTavish Exch", //
"Downtown" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 75L) {
if (gTrip.getDirectionId() == 0) { // SAANICHTON - NORTH
if (Arrays.asList( //
"To Keating Only", //
"Saanichton Exch Via Verdier", //
"Saanichton Exch" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - SOUTH
if (Arrays.asList( //
"Royal Oak Exch To 30 Downtown", //
"Royal Oak Exch To 31 Downtown", //
"Downtown" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 76L) {
if (isGoodEnoughAccepted()) { // TODO check
if (gTrip.getDirectionId() == 0) { // SWARTZ BAY FERRY - NORTH
if (Arrays.asList( //
"Swartz Bay Ferry Non-Stop" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - SOUTH
if (Arrays.asList( //
"UVic - Via Express" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
return;
}
}
}
} else if (mTrip.getRouteId() == 81L) {
if (gTrip.getDirectionId() == 0) { // SWARTZ BAY FERRY - NORTH
if (Arrays.asList( //
"To Sidney Only", //
"Swartz Bay Ferry" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // BRENTWOOD - SOUTH
if (Arrays.asList( //
"Saanichton Exch", //
"Brentwood To Verdier Only", //
"Brentwood" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 82L) {
if (gTrip.getDirectionId() == 0) { // SIDNEY - NORTH
if (Arrays.asList( //
"Sidney Via Stautw" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // SAANICHTON - SOUTH
if (Arrays.asList( //
"To Brentwood Via Stautw", //
"Saanichton Exch Via Stautw" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 83L) {
if (gTrip.getDirectionId() == 0) { // SIDNEY - NORTH
if (Arrays.asList( //
"Sidney Via West Saanich" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // ROYAL OAK - SOUTH
if (Arrays.asList( //
"Royal Oak Exch Via West Saanich" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 85L) {
// TODO split
if (gTrip.getDirectionId() == 0) { // NORTH SAANICH - CLOCKWISE
if (Arrays.asList( //
"North Saanich" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.CLOCKWISE);
return;
}
} else if (gTrip.getDirectionId() == 1) { // NORTH SAANICH - CLOCKWISE
if (Arrays.asList( //
"North Saanich" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.CLOCKWISE);
return;
}
}
} else if (mTrip.getRouteId() == 87L) {
if (gTrip.getDirectionId() == 0) { // SIDNEY - NORTH
if (Arrays.asList( //
"Sidney" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // SAANICHTON - SOUTH
if (Arrays.asList( //
"Dean Park Via Airport To Saanichton" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 88L) {
if (gTrip.getDirectionId() == 0) { // SIDNEY - NORTH
if (Arrays.asList( //
"Sidney" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // AIRPORT - SOUTH
if (Arrays.asList( //
"Airport" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 999L) { // ARB
if (isGoodEnoughAccepted()) { // TODO check
if (gTrip.getDirectionId() == 1) { // ??? - ????
if (Arrays.asList( //
"Shuttle Bus" //
).contains(tripHeadsign)) {
mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.COUNTERCLOCKWISE);
return;
}
}
}
}
System.out.printf("\n%s: Unexpected trips headsign for %s!\n", mTrip.getRouteId(), gTrip);
System.exit(-1);
}
@Override
public boolean mergeHeadsign(MTrip mTrip, MTrip mTripToMerge) {
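// Merges the headsigns of two trips that share a direction, keeping the
// dominant destination; headsigns flagged "// <>" above appear in both
// directions in the feed, so the direction-specific destination wins.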
List<String> headsignsValues = Arrays.asList(mTrip.getHeadsignValue(), mTripToMerge.getHeadsignValue());
if (mTrip.getRouteId() == 2L) {
if (Arrays.asList( //
JAMES_BAY, // <>
DOWNTOWN, // <>
WILLOWS, //
SOUTH_OAK_BAY //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(SOUTH_OAK_BAY, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 3L) {
if (Arrays.asList( //
DOWNTOWN, // <>
"10 " + R_JUBILEE, //
JAMES_BAY // ++
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(JAMES_BAY, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 4L) {
if (Arrays.asList( //
GORGE + AND + DOUGLAS, //
DOWNTOWN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(DOWNTOWN, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 6L) {
if (Arrays.asList( //
"A " + DOWNTOWN, //
"B " + DOWNTOWN, //
DOWNTOWN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(DOWNTOWN, mTrip.getHeadsignId());
return true;
} else if (Arrays.asList( //
"A " + ROYAL_OAK_EXCH, //
"B " + ROYAL_OAK_EXCH, //
ROYAL_OAK_EXCH //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(ROYAL_OAK_EXCH, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 7L) {
if (Arrays.asList( //
"N " + U_VIC, //
U_VIC //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(U_VIC, mTrip.getHeadsignId());
return true;
} else if (Arrays.asList( //
INTERURBAN, //
"N " + DOWNTOWN, //
DOWNTOWN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(DOWNTOWN, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 8L) {
if (Arrays.asList( //
DOUGLAS, //
RICHMOND + AND + OAK_BAY + " Ave", //
OAK_BAY //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(OAK_BAY, mTrip.getHeadsignId());
return true;
} else if (Arrays.asList( //
TILLICUM_MALL, //
INTERURBAN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(INTERURBAN, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 10L) {
if (Arrays.asList( //
VIC_WEST, //
JAMES_BAY //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(JAMES_BAY, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 11L) {
if (Arrays.asList( //
DOWNTOWN, //
U_VIC //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(U_VIC, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 14L) {
if (Arrays.asList( //
DOWNTOWN, // <>
U_VIC //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(U_VIC, mTrip.getHeadsignId());
return true;
} else if (Arrays.asList( //
DOWNTOWN, // <>
VIC_GENERAL //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(VIC_GENERAL, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 15L) {
if (Arrays.asList( //
DOWNTOWN, //
U_VIC //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(U_VIC, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 21L) {
if (Arrays.asList( //
"N " + CAMOSUN, //
INTERURBAN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(INTERURBAN, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 22L) {
if (Arrays.asList( //
DOWNTOWN, // <>
"A " + VIC_GENERAL, //
SPECTRUM_SCHOOL, //
VIC_GENERAL //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(VIC_GENERAL, mTrip.getHeadsignId());
return true;
} else if (Arrays.asList( //
DOWNTOWN, // <>
"A " + HILLSIDE_MALL, //
HILLSIDE_MALL //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(HILLSIDE_MALL, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 24L) {
if (Arrays.asList( //
DOWNTOWN, //
ADMIRALS_WALK //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(ADMIRALS_WALK, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 25L) {
if (Arrays.asList( //
SHORELINE_SCHOOL, //
ADMIRALS_WALK //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(ADMIRALS_WALK, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 26L) {
if (Arrays.asList( //
UPTOWN, // <>
U_VIC //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(U_VIC, mTrip.getHeadsignId());
return true;
} else if (Arrays.asList( //
UPTOWN, // <>
DOCKYARD //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(DOCKYARD, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 27L) {
if (Arrays.asList( //
HILLSIDE, //
DOWNTOWN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(DOWNTOWN, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 28L) {
if (Arrays.asList( //
MC_KENZIE, //
HILLSIDE, //
DOWNTOWN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(DOWNTOWN, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 30L) {
if (Arrays.asList( //
"75 " + SAANICHTON, //
ROYAL_OAK_EXCH //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(ROYAL_OAK_EXCH, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 31L) {
if (Arrays.asList( //
"75 " + SAANICHTON, //
ROYAL_OAK_EXCH //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(ROYAL_OAK_EXCH, mTrip.getHeadsignId());
return true;
} else if (Arrays.asList( //
GORGE, //
UPTOWN, //
DOWNTOWN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(DOWNTOWN, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 32L) {
if (Arrays.asList( //
DOWNTOWN, //
ROYAL_OAK_EXCH //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(ROYAL_OAK_EXCH, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 39L) {
if (Arrays.asList( //
ROYAL_OAK_EXCH, //
INTERURBAN, //
WESTHILLS_EXCH //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(WESTHILLS_EXCH, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 50L) {
if (Arrays.asList( //
"61 " + SOOKE, //
LANGFORD //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(LANGFORD, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 52L) {
if (Arrays.asList( //
LANGFORD_EXCH, // <>
COLWOOD_EXCH //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(COLWOOD_EXCH, mTrip.getHeadsignId());
return true;
} else if (Arrays.asList( //
LANGFORD_EXCH, // <>
BEAR_MOUNTAIN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(BEAR_MOUNTAIN, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 55L) {
if (Arrays.asList( //
COLWOOD_EXCH, //
HAPPY_VLY //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(HAPPY_VLY, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 61L) {
if (Arrays.asList( //
LANGFORD, //
"50 " + DOWNTOWN, //
DOWNTOWN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(DOWNTOWN, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 64L) {
if (Arrays.asList( //
"17 " + MILE_HOUSE, //
LANGFORD, //
SOOKE //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(SOOKE, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 70L) {
if (Arrays.asList( //
GORGE, //
DOWNTOWN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(DOWNTOWN, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 72L) {
if (Arrays.asList( //
MC_DONALD_PARK, //
SWARTZ_BAY_FERRY //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(SWARTZ_BAY_FERRY, mTrip.getHeadsignId());
return true;
} else if (Arrays.asList( //
MC_TAVISH_EXCH, //
DOWNTOWN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(DOWNTOWN, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 75L) {
if (Arrays.asList( //
KEATING, //
SAANICHTON_EXCH //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(SAANICHTON_EXCH, mTrip.getHeadsignId());
return true;
} else if (Arrays.asList( //
ROYAL_OAK_EXCH, //
"30 " + DOWNTOWN, //
"31 " + DOWNTOWN, //
DOWNTOWN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(DOWNTOWN, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 81L) {
if (Arrays.asList( //
SAANICHTON_EXCH, //
VERDIER, //
BRENTWOOD //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(BRENTWOOD, mTrip.getHeadsignId());
return true;
} else if (Arrays.asList( //
SIDNEY, //
SWARTZ_BAY_FERRY //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(SWARTZ_BAY_FERRY, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 82L) {
if (Arrays.asList( //
SAANICHTON_EXCH, //
BRENTWOOD //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(BRENTWOOD, mTrip.getHeadsignId());
return true;
}
}
System.out.printf("\nUnexpected trips to merges %s & %s!\n", mTrip, mTripToMerge);
System.exit(-1);
return false;
}
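	// Trip headsign cleanup patterns, applied in order by cleanTripHeadsign() below:
	// "Exchange" -> "Exch" and "Hghts" -> "Hts" so cleaned headsigns match the string constants above.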
private static final Pattern EXCHANGE = Pattern.compile("((^|\\W){1}(exchange)(\\W|$){1})", Pattern.CASE_INSENSITIVE);
private static final String EXCHANGE_REPLACEMENT = "$2" + EXCH + "$4";
private static final Pattern HEIGHTS = Pattern.compile("((^|\\W){1}(Hghts)(\\W|$){1})", Pattern.CASE_INSENSITIVE);
private static final String HEIGHTS_REPLACEMENT = "$2Hts$4";
private static final Pattern ENDS_WITH_EXPRESS = Pattern.compile("( express.*$)", Pattern.CASE_INSENSITIVE);
private static final Pattern ENDS_WITH_VIA = Pattern.compile("( via .*$)", Pattern.CASE_INSENSITIVE);
private static final Pattern STARTS_WITH_TO = Pattern.compile("(^.* to )", Pattern.CASE_INSENSITIVE);
private static final Pattern STARTS_WITH_TO_ = Pattern.compile("(^to )", Pattern.CASE_INSENSITIVE);
private static final Pattern ENDS_WITH_DASH = Pattern.compile("( \\- .*$)", Pattern.CASE_INSENSITIVE);
private static final Pattern ENDS_WITH_NON_STOP = Pattern.compile("( non\\-stop$)", Pattern.CASE_INSENSITIVE);
private static final Pattern ENDS_WITH_ONLY = Pattern.compile("( only$)", Pattern.CASE_INSENSITIVE);
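	// Lower-cases all-caps feed headsigns, applies the abbreviations above, then strips
	// "to"/"via"/"express"/"non-stop"/"only" noise. Leading route numbers are kept here
	// (e.g. "75 Saanichton") so mergeHeadsign() can match the prefixed values it lists.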
@Override
public String cleanTripHeadsign(String tripHeadsign) {
if (Utils.isUppercaseOnly(tripHeadsign, true, true)) {
tripHeadsign = tripHeadsign.toLowerCase(Locale.ENGLISH);
}
tripHeadsign = EXCHANGE.matcher(tripHeadsign).replaceAll(EXCHANGE_REPLACEMENT);
tripHeadsign = HEIGHTS.matcher(tripHeadsign).replaceAll(HEIGHTS_REPLACEMENT);
tripHeadsign = ENDS_WITH_DASH.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
tripHeadsign = ENDS_WITH_VIA.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
tripHeadsign = STARTS_WITH_TO.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
tripHeadsign = ENDS_WITH_EXPRESS.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
tripHeadsign = STARTS_WITH_TO_.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
tripHeadsign = ENDS_WITH_NON_STOP.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
tripHeadsign = ENDS_WITH_ONLY.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
tripHeadsign = CleanUtils.cleanSlashes(tripHeadsign);
tripHeadsign = CleanUtils.cleanStreetTypes(tripHeadsign);
tripHeadsign = CleanUtils.cleanNumbers(tripHeadsign);
return CleanUtils.cleanLabel(tripHeadsign);
}
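	// Stop name cleanup: drop "(-IMPL-)" markers and "Eastbound"/"Westbound"/... prefixes,
	// normalize at/and separators, and abbreviate "exchange" and "uvic".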
private static final Pattern STARTS_WITH_BOUND = Pattern.compile("(^(east|west|north|south)bound)", Pattern.CASE_INSENSITIVE);
private static final Pattern UVIC = Pattern.compile("((^|\\W){1}(uvic)(\\W|$){1})", Pattern.CASE_INSENSITIVE);
private static final String UVIC_REPLACEMENT = "$2" + U_VIC + "$4";
private static final Pattern STARTS_WITH_IMPL = Pattern.compile("(^(\\(\\-IMPL\\-\\)))", Pattern.CASE_INSENSITIVE);
@Override
public String cleanStopName(String gStopName) {
gStopName = STARTS_WITH_IMPL.matcher(gStopName).replaceAll(StringUtils.EMPTY);
gStopName = STARTS_WITH_BOUND.matcher(gStopName).replaceAll(StringUtils.EMPTY);
gStopName = CleanUtils.CLEAN_AT.matcher(gStopName).replaceAll(CleanUtils.CLEAN_AT_REPLACEMENT);
gStopName = CleanUtils.CLEAN_AND.matcher(gStopName).replaceAll(CleanUtils.CLEAN_AND_REPLACEMENT);
gStopName = EXCHANGE.matcher(gStopName).replaceAll(EXCHANGE_REPLACEMENT);
gStopName = UVIC.matcher(gStopName).replaceAll(UVIC_REPLACEMENT);
gStopName = CleanUtils.cleanStreetTypes(gStopName);
gStopName = CleanUtils.cleanNumbers(gStopName);
return CleanUtils.cleanLabel(gStopName);
}
@Override
public int getStopId(GStop gStop) {
return Integer.parseInt(gStop.getStopCode()); // use stop code as stop ID
}
}
| src/org/mtransit/parser/ca_victoria_regional_transit_system_bus/VictoriaRegionalTransitSystemBusAgencyTools.java | package org.mtransit.parser.ca_victoria_regional_transit_system_bus;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import org.mtransit.commons.StrategicMappingCommons;
import org.mtransit.parser.CleanUtils;
import org.mtransit.parser.DefaultAgencyTools;
import org.mtransit.parser.Pair;
import org.mtransit.parser.SplitUtils;
import org.mtransit.parser.SplitUtils.RouteTripSpec;
import org.mtransit.parser.Utils;
import org.mtransit.parser.gtfs.data.GCalendar;
import org.mtransit.parser.gtfs.data.GCalendarDate;
import org.mtransit.parser.gtfs.data.GRoute;
import org.mtransit.parser.gtfs.data.GSpec;
import org.mtransit.parser.gtfs.data.GStop;
import org.mtransit.parser.gtfs.data.GTrip;
import org.mtransit.parser.gtfs.data.GTripStop;
import org.mtransit.parser.mt.data.MAgency;
import org.mtransit.parser.mt.data.MRoute;
import org.mtransit.parser.mt.data.MTrip;
import org.mtransit.parser.mt.data.MTripStop;
// https://www.bctransit.com/open-data
// https://victoria.mapstrat.com/current/google_transit.zip
public class VictoriaRegionalTransitSystemBusAgencyTools extends DefaultAgencyTools {
public static void main(String[] args) {
if (args == null || args.length == 0) {
args = new String[3];
args[0] = "input/gtfs.zip";
args[1] = "../../mtransitapps/ca-victoria-regional-transit-system-bus-android/res/raw/";
args[2] = ""; // files-prefix
}
new VictoriaRegionalTransitSystemBusAgencyTools().start(args);
}
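	// Service IDs kept after filtering; populated in start() and consulted by the exclude* callbacks.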
private HashSet<String> serviceIds;
@Override
public void start(String[] args) {
System.out.printf("\nGenerating Victoria Regional TS bus data...");
long start = System.currentTimeMillis();
this.serviceIds = extractUsefulServiceIds(args, this, true);
super.start(args);
System.out.printf("\nGenerating Victoria Regional TS bus data... DONE in %s.\n", Utils.getPrettyDuration(System.currentTimeMillis() - start));
}
@Override
public boolean excludingAll() {
return this.serviceIds != null && this.serviceIds.isEmpty();
}
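	// Optional service-ID prefix filters; both are null here, so no prefix filtering is applied.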
private static final String INCLUDE_ONLY_SERVICE_ID_STARTS_WITH = null;
private static final String INCLUDE_ONLY_SERVICE_ID_STARTS_WITH2 = null;
@Override
public boolean excludeCalendar(GCalendar gCalendar) {
if (INCLUDE_ONLY_SERVICE_ID_STARTS_WITH != null && !gCalendar.getServiceId().startsWith(INCLUDE_ONLY_SERVICE_ID_STARTS_WITH)
&& INCLUDE_ONLY_SERVICE_ID_STARTS_WITH2 != null && !gCalendar.getServiceId().startsWith(INCLUDE_ONLY_SERVICE_ID_STARTS_WITH2)) {
return true;
}
if (this.serviceIds != null) {
return excludeUselessCalendar(gCalendar, this.serviceIds);
}
return super.excludeCalendar(gCalendar);
}
@Override
public boolean excludeCalendarDate(GCalendarDate gCalendarDates) {
if (INCLUDE_ONLY_SERVICE_ID_STARTS_WITH != null && !gCalendarDates.getServiceId().startsWith(INCLUDE_ONLY_SERVICE_ID_STARTS_WITH)
&& INCLUDE_ONLY_SERVICE_ID_STARTS_WITH2 != null && !gCalendarDates.getServiceId().startsWith(INCLUDE_ONLY_SERVICE_ID_STARTS_WITH2)) {
return true;
}
if (this.serviceIds != null) {
return excludeUselessCalendarDate(gCalendarDates, this.serviceIds);
}
return super.excludeCalendarDate(gCalendarDates);
}
private static final String INCLUDE_AGENCY_ID = "1"; // Victoria Regional Transit System only
@Override
public boolean excludeRoute(GRoute gRoute) {
if (!INCLUDE_AGENCY_ID.equals(gRoute.getAgencyId())) {
return true;
}
return super.excludeRoute(gRoute);
}
@Override
public boolean excludeTrip(GTrip gTrip) {
if (INCLUDE_ONLY_SERVICE_ID_STARTS_WITH != null && !gTrip.getServiceId().startsWith(INCLUDE_ONLY_SERVICE_ID_STARTS_WITH)
&& INCLUDE_ONLY_SERVICE_ID_STARTS_WITH2 != null && !gTrip.getServiceId().startsWith(INCLUDE_ONLY_SERVICE_ID_STARTS_WITH2)) {
return true;
}
if (this.serviceIds != null) {
return excludeUselessTrip(gTrip, this.serviceIds);
}
return super.excludeTrip(gTrip);
}
@Override
public Integer getAgencyRouteType() {
return MAgency.ROUTE_TYPE_BUS;
}
@Override
public long getRouteId(GRoute gRoute) {
return Long.parseLong(gRoute.getRouteShortName()); // use route short name as route ID
}
@Override
public String getRouteLongName(GRoute gRoute) {
String routeLongName = gRoute.getRouteLongName();
routeLongName = CleanUtils.cleanNumbers(routeLongName);
routeLongName = CleanUtils.cleanStreetTypes(routeLongName);
return CleanUtils.cleanLabel(routeLongName);
}
private static final String AGENCY_COLOR_GREEN = "34B233";// GREEN (from PDF Corporate Graphic Standards)
private static final String AGENCY_COLOR_BLUE = "002C77"; // BLUE (from PDF Corporate Graphic Standards)
private static final String AGENCY_COLOR = AGENCY_COLOR_GREEN;
@Override
public String getAgencyColor() {
return AGENCY_COLOR;
}
@Override
public String getRouteColor(GRoute gRoute) {
if (StringUtils.isEmpty(gRoute.getRouteColor())) {
return AGENCY_COLOR_BLUE;
}
return super.getRouteColor(gRoute);
}
private static final String AND = " & ";
private static final String EXCH = "Exch";
private static final String DOWNTOWN = "Downtown";
private static final String OAK_BAY = "Oak Bay";
private static final String SOUTH_OAK_BAY = "South " + OAK_BAY;
private static final String ROYAL_OAK = "Royal Oak";
private static final String ROYAL_OAK_EXCH = ROYAL_OAK + " " + EXCH;
private static final String CAMOSUN = "Camosun";
private static final String JAMES_BAY = "James Bay";
private static final String DOCKYARD = "Dockyard";
private static final String ADMIRALS_WALK = "Admirals Walk";
private static final String HILLSIDE = "Hillside";
private static final String HILLSIDE_MALL = HILLSIDE + " Mall";
private static final String U_VIC = "UVic";
private static final String BRENTWOOD = "Brentwood";
private static final String SAANICHTON = "Saanichton";
private static final String SAANICHTON_EXCH = SAANICHTON + " " + EXCH;
private static final String SWARTZ_BAY = "Swartz Bay";
private static final String SWARTZ_BAY_FERRY = SWARTZ_BAY + " Ferry";
private static final String SOOKE = "Sooke";
private static final String LANGFORD = "Langford";
private static final String LANGFORD_EXCH = LANGFORD + " " + EXCH;
private static final String COLWOOD_EXCH = "Colwood " + EXCH;
private static final String HAPPY_VLY = "Happy Vly";
private static final String TILLICUM_MALL = "Tillicum Mall";
private static final String SPECTRUM_SCHOOL = "Spectrum School";
private static final String GORGE = "Gorge";
private static final String INTERURBAN = "Interurban";
private static final String MILE_HOUSE = "Mile House";
private static final String VERDIER = "Verdier";
private static final String SIDNEY = "Sidney";
private static final String BEAR_MOUTAIN = "Bear Mtn";
private static final String MC_DONALD_PARK = "McDonald Pk";
private static final String MC_TAVISH = "McTavish";
private static final String MC_TAVISH_EXCH = MC_TAVISH + " " + EXCH;
private static final String VIC_GENERAL = "Vic General";
private static final String UPTOWN = "Uptown";
private static final String RICHMOND = "Richmond";
private static final String MC_KENZIE = "McKenzie";
private static final String DOUGLAS = "Douglas";
private static final String WILLOWS = "Willows";
private static final String WESTHILLS_EXCH = "Westhills Exch";
private static final String KEATING = "Keating";
private static final String SHORELINE_SCHOOL = "Shoreline Sch";
private static final String R_JUBILEE = "R. Jubilee";
private static final String VIC_WEST = "Vic West";
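	// Manually split route/trip definitions; the map is left empty, so every route falls
	// through to the default GTFS direction handling in the methods below.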
private static HashMap<Long, RouteTripSpec> ALL_ROUTE_TRIPS2;
static {
HashMap<Long, RouteTripSpec> map2 = new HashMap<Long, RouteTripSpec>();
ALL_ROUTE_TRIPS2 = map2;
}
@Override
public int compareEarly(long routeId, List<MTripStop> list1, List<MTripStop> list2, MTripStop ts1, MTripStop ts2, GStop ts1GStop, GStop ts2GStop) {
if (ALL_ROUTE_TRIPS2.containsKey(routeId)) {
return ALL_ROUTE_TRIPS2.get(routeId).compare(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop, this);
}
return super.compareEarly(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop);
}
@Override
public ArrayList<MTrip> splitTrip(MRoute mRoute, GTrip gTrip, GSpec gtfs) {
if (ALL_ROUTE_TRIPS2.containsKey(mRoute.getId())) {
return ALL_ROUTE_TRIPS2.get(mRoute.getId()).getAllTrips();
}
return super.splitTrip(mRoute, gTrip, gtfs);
}
@Override
public Pair<Long[], Integer[]> splitTripStop(MRoute mRoute, GTrip gTrip, GTripStop gTripStop, ArrayList<MTrip> splitTrips, GSpec routeGTFS) {
if (ALL_ROUTE_TRIPS2.containsKey(mRoute.getId())) {
return SplitUtils.splitTripStop(mRoute, gTrip, gTripStop, routeGTFS, ALL_ROUTE_TRIPS2.get(mRoute.getId()), this);
}
return super.splitTripStop(mRoute, gTrip, gTripStop, splitTrips, routeGTFS);
}
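	// Maps each route's GTFS direction ID to a compass or loop direction based on known
	// headsign strings; an unrecognized headsign aborts the run so new feed values get reviewed.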
@Override
public void setTripHeadsign(MRoute mRoute, MTrip mTrip, GTrip gTrip, GSpec gtfs) {
if (ALL_ROUTE_TRIPS2.containsKey(mRoute.getId())) {
return; // split
}
if (mRoute.getId() == 1L) {
if (gTrip.getDirectionId() == 0) { // DOWNTOWN - WEST
if (Arrays.asList( //
"Downtown" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // SOUTH OAK BAY - EAST
if (Arrays.asList( //
"South Oak Bay via Richardson" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
return;
}
}
} else if (mRoute.getId() == 2L) {
if (gTrip.getDirectionId() == 0) { // JAMES BAY - WEST
if (Arrays.asList( //
"James Bay - Fisherman's Wharf" // <>
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // OAK BAY - EAST
if (Arrays.asList( //
"James Bay - Fisherman's Wharf", // <>
"Downtown", //
"Downtown Only", //
"South Oak Bay - Oak Bay Village", //
"Willows - Oak Bay Village" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
return;
}
}
} else if (mRoute.getId() == 3L) {
if (gTrip.getDirectionId() == 0) { // JAMES BAY - CLOCKWISE
if (Arrays.asList( //
"Downtown Only", //
"James Bay To 10 R. Jubilee", //
"James Bay - Linden to 10 R. Jubilee", //
"James Bay - Quimper To 10 R. Jubilee" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.CLOCKWISE);
return;
}
} else if (gTrip.getDirectionId() == 1) { // ROYAL JUBILEE - COUNTERCLOCKWISE
if (Arrays.asList( //
"Royal Jubilee - Cook St Village", //
"Royal Jubilee - Cook St Vlg/Quimper" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.COUNTERCLOCKWISE);
return;
}
}
} else if (mRoute.getId() == 4L) {
if (gTrip.getDirectionId() == 0) { // DOWNTOWN - WEST
if (Arrays.asList( //
"Downtown", //
"To Gorge & Douglas" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
if (Arrays.asList( //
"UVic Via Hillside" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
return;
}
}
} else if (mRoute.getId() == 6L) {
if (gTrip.getDirectionId() == 0) { // ROYAL OAK - NORTH
if (Arrays.asList( //
"Royal Oak Exch Via Royal Oak Mall", //
"6A Royal Oak Exch Via Emily Carr", //
"6B Royal Oak Exch Via Chatterton" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - SOUTH
if (Arrays.asList( //
"Downtown", //
"6B Downtown Via Chatterton", //
"6A Downtown Via Emily Carr" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mRoute.getId() == 7L) {
if (gTrip.getDirectionId() == 0) { // DOWNTOWN - CLOCKWISE
if (Arrays.asList( //
"Downtown Only", //
"7N Downtown Only", //
"Downtown - To 21 Interurban", //
"7N Downtown - To 21 Interurban", //
"Downtown To 21 Interurban" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.CLOCKWISE);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - COUNTERCLOCKWISE
if (Arrays.asList( //
"UVic Via Fairfield", //
"7N UVic - Cook St Village" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.COUNTERCLOCKWISE);
return;
}
}
} else if (mTrip.getRouteId() == 8L) {
if (gTrip.getDirectionId() == 0) { // INTERURBAN - WEST
if (Arrays.asList( //
"Tillicum Mall Via Finalyson", //
"Interurban Via Finlayson" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // OAK BAY - EAST
if (Arrays.asList( //
"To Richmond & Oak Bay Ave Only", //
"To Douglas Only - Mayfair Mall", //
"Oak Bay Via Finlayson", //
"Oak Bay Via Finalyson" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 9L) {
if (gTrip.getDirectionId() == 0) { // ROYAL OAK - WEST
if (Arrays.asList( //
"Royal Oak Exch - Hillside/Gorge" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
if (Arrays.asList( //
"UVic - Gorge/Hillside" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 10L) {
if (gTrip.getDirectionId() == 0) { // ROYAL JUBILEE - CLOCKWISE
if (Arrays.asList( //
"Royal Jubilee Via Vic West" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.CLOCKWISE);
return;
}
} else if (gTrip.getDirectionId() == 1) { // JAMES BAY - COUNTERCLOCKWISE
if (Arrays.asList( //
"James Bay - To 3 R. Jubilee", //
"To Vic West Only" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.COUNTERCLOCKWISE);
return;
}
}
} else if (mTrip.getRouteId() == 11L) {
if (gTrip.getDirectionId() == 0) { // TILLICUM MALL - WEST
if (Arrays.asList( //
"Tillicum Mall Via Gorge" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
if (Arrays.asList( //
"Downtown", //
"Downtown Only", //
"UVic Via Uplands" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 12L) {
if (gTrip.getDirectionId() == 0) { // UNIVERSITY HGTS - WEST
if (Arrays.asList( //
"University Hgts Via Kenmore" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
if (Arrays.asList( //
"UVic Via Kenmore" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 13L) {
if (gTrip.getDirectionId() == 0) { // UVIC - WEST
if (Arrays.asList( //
"UVic" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // TEN MILE POINT - EAST
if (Arrays.asList( //
"Ten Mile Point" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 14L) {
if (gTrip.getDirectionId() == 0) { // VIC GENERAL - WEST
if (Arrays.asList( //
"Downtown", //
"Vic General Via Craigflower" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
if (Arrays.asList( //
"Downtown Only", //
"Downtown", //
"UVic", //
"UVic Via Richmond" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 15L) {
if (gTrip.getDirectionId() == 0) { // ESQUIMALT - WEST
if (Arrays.asList( //
"Esquimalt", //
"Esquimalt - Fort/Yates Exp" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
if (Arrays.asList( //
"Downtown", //
"UVic - Foul Bay Exp" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 16L) {
if (gTrip.getDirectionId() == 0) { // UPTOWN - WEST
if (Arrays.asList( //
"Uptown - McKenzie Exp" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
if (Arrays.asList( //
"UVic - McKenzie Exp" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 17L) {
if (gTrip.getDirectionId() == 0) { // Downtown - WEST
if (Arrays.asList( //
"Downtown Via Quadra" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
if (Arrays.asList( //
"UVic Via Cedar Hill Sch" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 21L) {
if (gTrip.getDirectionId() == 0) { // INTERURBAN - CLOCKWISE
if (Arrays.asList( //
"Interurban - VI Tech Park", //
"Interurban - Camosun Only", //
"Interurban - Viaduct Loop", //
"21N Camosun Via Burnside" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.CLOCKWISE);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - COUNTERCLOCKWISE
if (Arrays.asList( //
"Downtown To 7 UVic" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.COUNTERCLOCKWISE);
return;
}
}
} else if (mTrip.getRouteId() == 22L) {
if (gTrip.getDirectionId() == 0) { // VIC GENERAL - NORTH
if (Arrays.asList( //
"Downtown Only", //
"Downtown", //
"Vic General - Watkiss Way Via Burnside", //
"22A Vic General - Watkiss Wy Via S. Vale", //
"22A Vic General Via Straw Vale", //
"22A Vic General Via S. Vale", //
"To Spectrum School", //
"Vic General Via Burnside" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
return;
}
			} else if (gTrip.getDirectionId() == 1) { // HILLSIDE MALL - SOUTH
if (Arrays.asList( //
"Downtown Only", //
"Downtown", //
"22A Hillside Mall Via Straw Vale", //
"Hillside Mall Via Fernwood" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 24L) {
if (gTrip.getDirectionId() == 0) { // Admirals Walk - WEST
if (Arrays.asList( //
"Downtown Only", //
"Downtown", //
"Admirals Walk Via Parklands/Colville", //
"Admirals Walk Via Colville" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // Cedar Hill - EAST
if (Arrays.asList( //
"Cedar Hill", //
"Cedar Hill Via Parklands" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 25L) {
if (gTrip.getDirectionId() == 0) { // Admirals Walk - WEST
if (Arrays.asList( //
"Shoreline Sch Via Munro", //
"Admirals Walk Via Munro" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // Maplewood - EAST
if (Arrays.asList( //
"Maplewood" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 26L) {
if (gTrip.getDirectionId() == 0) { // DOCKYARD - WEST
if (Arrays.asList( //
"To Uptown Only", //
"Dockyard Via McKenzie" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
if (Arrays.asList( //
"To Uptown Only", //
"UVic Via McKenzie" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 27L) {
if (gTrip.getDirectionId() == 0) { // GORDON HEAD - NORTH
if (Arrays.asList( //
"Gordon Head Via Shelbourne" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - SOUTH
if (Arrays.asList( //
"27X Express To Downtown", //
"To Hillside Only", //
"Downtown" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 28L) {
if (gTrip.getDirectionId() == 0) { // MAJESTIC - NORTH
if (Arrays.asList( //
"28X Express To Majestic", //
"Majestic Via Shelbourne" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - SOUTH
if (Arrays.asList( //
"To McKenzie Only", //
"To Hillside Only", //
"Downtown" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 30L) {
if (Arrays.asList( //
"Royal Oak Exch Via Carey", //
"Royal Oak Exch To 75 Saanichton" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
return;
}
if (Arrays.asList( //
"Downtown" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
return;
}
} else if (mTrip.getRouteId() == 31L) {
if (gTrip.getDirectionId() == 0) { // ROYAL OAK - NORTH
if (Arrays.asList( //
"Royal Oak Exch To 75 Saanichton", //
"Royal Oak Exch Via Glanford" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - SOUTH
if (Arrays.asList( //
"To Gorge Only", //
"To Uptown Only", //
"Downtown" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 32L) {
if (gTrip.getDirectionId() == 0) { // Cordova Bay - NORTH
if (Arrays.asList( //
"Cordova Bay" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // ROYAL OAK - SOUTH
if (Arrays.asList( //
"Downtown", //
"Royal Oak Exch" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 35L) {
// TODO split? NORTH/SOUTH
if (gTrip.getDirectionId() == 0) { // Ridge - CLOCKWISE
if (Arrays.asList( //
"Ridge" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.CLOCKWISE);
return;
}
}
} else if (mTrip.getRouteId() == 39L) {
if (gTrip.getDirectionId() == 0) { // WESTHILLS - WEST
if (Arrays.asList( //
"Royal Oak Exch", //
"Interurban", //
"Westhills Exch" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
if (Arrays.asList( //
"UVic Via Royal Oak" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 43L) {
if (gTrip.getDirectionId() == 0) { // ROYAL ROADS - CLOCKWISE
if (Arrays.asList( //
"Belmont Park - Royal Roads" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.CLOCKWISE);
return;
}
}
} else if (mTrip.getRouteId() == 46L) {
if (gTrip.getDirectionId() == 0) { // WESTHILLS - WEST
if (Arrays.asList( //
"Westhills Exch" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOCKYARD - EAST
if (Arrays.asList( //
"Dockyard" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 47L) {
if (gTrip.getDirectionId() == 0) { // GOLDSTREAM MEADOWS - WEST
if (Arrays.asList( //
"Goldstream Mdws Via Thetis Hgts" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - EAST
if (Arrays.asList( //
"Downtown" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 48L) {
if (gTrip.getDirectionId() == 0) { // HAPPY VALLEY - WEST
if (Arrays.asList( //
"Happy Valley via Colwood", //
"HAPPY VALLEY VIA COLWOOD" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - EAST
if (Arrays.asList( //
"Downtown" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 50L) {
if (gTrip.getDirectionId() == 0) { // LANGFORD - WEST
if (Arrays.asList( //
"Langford To 61 Sooke", //
"Langford" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - EAST
if (Arrays.asList( //
"Downtown" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 51L) {
if (gTrip.getDirectionId() == 0) { // LANGFORD - WEST
if (Arrays.asList( //
"Langford - McKenzie Exp" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
if (Arrays.asList( //
"UVic - McKenzie Exp" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 52L) {
if (gTrip.getDirectionId() == 0) { // BEAR MOUNTAIN - WEST
if (Arrays.asList( //
"Langford Exch Via Royal Bay", //
"Langford Exch Via Lagoon", //
"Langford Exch", //
"Bear Mountain - Lagoon/Royal Bay", //
"Bear Mountain Via Lagoon", //
"Bear Mountain" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // COLWOOD EXCHANGE - EAST
if (Arrays.asList( //
"Langford Exch", //
"Colwood Exch Via Royal Bay/Lagoon", //
"Colwood Exch Via Royal Bay", //
"Colwood Exch Via Lagoon", //
"Colwood Exch" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 53L) {
if (gTrip.getDirectionId() == 0) { // COLWOOD EXCHANGE - CLOCKWISE
if (Arrays.asList( //
"Colwood Exch Via Atkins - Thetis Lk", //
"Colwood Exch Via Atkins" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.CLOCKWISE);
return;
}
} else if (gTrip.getDirectionId() == 1) { // LANGFORD EXCHANGE - COUNTERCLOCKWISE
if (Arrays.asList( //
"Langford Exch Via Atkins - Theits Lk", //
"Langford Exch Via Atkins" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.COUNTERCLOCKWISE);
return;
}
}
} else if (mTrip.getRouteId() == 54L) {
if (gTrip.getDirectionId() == 0) { // LANGFORD EXCHANGE - CLOCKWISE
if (Arrays.asList( //
"Metchosin" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.CLOCKWISE);
return;
}
}
} else if (mTrip.getRouteId() == 55L) {
if (gTrip.getDirectionId() == 1) { // LANGFORD EXCHANGE - COUNTERCLOCKWISE
if (Arrays.asList( //
"Happy Valley To Colwood Exch", //
"Happy Valley" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.COUNTERCLOCKWISE);
return;
}
}
} else if (mTrip.getRouteId() == 56L) {
if (gTrip.getDirectionId() == 0) { // THETIS HEIGHTS - NORTH
if (Arrays.asList( //
"Thetis Heights Via Florence Lake" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // LANGFORD EXCHANGE - SOUTH
if (Arrays.asList( //
"Langford Exch" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 57L) {
if (gTrip.getDirectionId() == 0) { // THETIS HEIGHTS - NORTH
if (Arrays.asList( //
"Theits Heights Via Millstream" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // LANGFORD EXCHANGE - SOUTH
if (Arrays.asList( //
"Langford Exch" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 58L) {
if (gTrip.getDirectionId() == 1) { // GOLDSTREAM MEADOWS - OUTBOUND
if (Arrays.asList( //
"Goldstream Mdws" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.OUTBOUND);
return;
}
}
} else if (mTrip.getRouteId() == 59L) {
if (gTrip.getDirectionId() == 1) { // LANGFORD EXCHANGE - COUNTERCLOCKWISE
if (Arrays.asList( //
"Triangle Mtn Via Royal Bay", //
"Triangle Mtn" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.COUNTERCLOCKWISE);
return;
}
}
} else if (mTrip.getRouteId() == 60L) {
if (gTrip.getDirectionId() == 0) { // LANGFORD EXCHANGE - CLOCKWISE
if (Arrays.asList( //
"Wishart Via Royal Bay", //
"Wishart" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.CLOCKWISE);
return;
}
}
} else if (mTrip.getRouteId() == 61L) {
if (gTrip.getDirectionId() == 0) { // SOOKE - WEST
if (Arrays.asList( //
"Sooke" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - EAST
if (Arrays.asList( //
"Langford - Jacklin/Station", //
"Langford Exch To 50 Downtown", //
"Downtown" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 63L) {
// TODO split?
if (gTrip.getDirectionId() == 0) { // OTTER POINT - WEST
if (Arrays.asList( //
"Otter Point" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
}
} else if (mTrip.getRouteId() == 64L) {
// TODO split
if (gTrip.getDirectionId() == 0) { // SOOKE - CLOCKWISE
if (Arrays.asList( //
"East Sooke To 17 Mile House", //
"East Sooke To Langford", //
"East Sooke To Sooke" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.CLOCKWISE);
return;
}
}
if (isGoodEnoughAccepted()) {
if (gTrip.getDirectionId() == 1) { // SOOKE - ????
if (Arrays.asList( //
"East Sooke" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.COUNTERCLOCKWISE);
return;
}
}
}
} else if (mTrip.getRouteId() == 65L) {
if (gTrip.getDirectionId() == 0) { // SOOKE - WEST
if (Arrays.asList( //
"Sooke Via Westhills" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - EAST
if (Arrays.asList( //
"Downtown Via Westhills" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
return;
}
}
} else if (mTrip.getRouteId() == 70L) {
if (gTrip.getDirectionId() == 0) { // SWARTZ BAY FERRY - NORTH
if (Arrays.asList( //
"Swartz Bay Ferry Via Hwy #17" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - SOUTH
if (Arrays.asList( //
"To Gorge Only Via Hwy #17", //
"Downtown Via Hwy #17" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 71L) {
if (gTrip.getDirectionId() == 0) { // SWARTZ BAY FERRY - NORTH
if (Arrays.asList( //
"Swartz Bay Ferry Via West Sidney" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - SOUTH
if (Arrays.asList( //
"Downtown" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 72L) {
if (gTrip.getDirectionId() == 0) { // SWARTZ BAY FERRY - NORTH
if (Arrays.asList( //
"McDonald Park Via Saanichton", //
"Swartz Bay Ferry Via Saanichton" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - SOUTH
if (Arrays.asList( //
"McTavish Exch", //
"Downtown" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 75L) {
if (gTrip.getDirectionId() == 0) { // SAANICHTON - NORTH
if (Arrays.asList( //
"To Keating Only", //
"Saanichton Exch Via Verdier", //
"Saanichton Exch" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - SOUTH
if (Arrays.asList( //
"Royal Oak Exch To 30 Downtown", //
"Royal Oak Exch To 31 Downtown", //
"Downtown" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 76L) {
if (isGoodEnoughAccepted()) { // TODO check
if (gTrip.getDirectionId() == 0) { // SWARTZ BAY FERRY - NORTH
if (Arrays.asList( //
"Swartz Bay Ferry Non-Stop" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // UVIC - SOUTH
if (Arrays.asList( //
"UVic - Via Express" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
return;
}
}
}
} else if (mTrip.getRouteId() == 81L) {
if (gTrip.getDirectionId() == 0) { // SWARTZ BAY FERRY - NORTH
if (Arrays.asList( //
"To Sidney Only", //
"Swartz Bay Ferry" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // BRENTWOOD - SOUTH
if (Arrays.asList( //
"Saanichton Exch", //
"Brentwood To Verdier Only", //
"Brentwood" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 82L) {
if (gTrip.getDirectionId() == 0) { // SIDNEY - NORTH
if (Arrays.asList( //
"Sidney Via Stautw" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // SAANICHTON - SOUTH
if (Arrays.asList( //
"To Brentwood Via Stautw", //
"Saanichton Exch Via Stautw" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 83L) {
if (gTrip.getDirectionId() == 0) { // SIDNEY - NORTH
if (Arrays.asList( //
"Sidney Via West Saanich" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // ROYAL OAK - SOUTH
if (Arrays.asList( //
"Royal Oak Exch Via West Saanich" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 85L) {
// TODO split
if (gTrip.getDirectionId() == 0) { // NORTH SAANICH - CLOCKWISE
if (Arrays.asList( //
"North Saanich" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.CLOCKWISE);
return;
}
} else if (gTrip.getDirectionId() == 1) { // NORTH SAANICH - CLOCKWISE
if (Arrays.asList( //
"North Saanich" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.CLOCKWISE);
return;
}
}
} else if (mTrip.getRouteId() == 87L) {
if (gTrip.getDirectionId() == 0) { // SIDNEY - NORTH
if (Arrays.asList( //
"Sidney" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // SAANICHTON - SOUTH
if (Arrays.asList( //
"Dean Park Via Airport To Saanichton" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 88L) {
if (gTrip.getDirectionId() == 0) { // SIDNEY - NORTH
if (Arrays.asList( //
"Sidney" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
return;
}
} else if (gTrip.getDirectionId() == 1) { // AIRPORT - SOUTH
if (Arrays.asList( //
"Airport" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
return;
}
}
} else if (mTrip.getRouteId() == 11_802L) { // ARB
if (isGoodEnoughAccepted()) { // TODO check
if (gTrip.getDirectionId() == 1) { // ??? - ????
if (Arrays.asList( //
"Shuttle Bus" //
).contains(gTrip.getTripHeadsign())) {
mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.COUNTERCLOCKWISE);
return;
}
}
}
}
System.out.printf("\n%s: Unexpected trips headsign for %s!\n", mTrip.getRouteId(), gTrip);
System.exit(-1);
}
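	// Merges duplicate headsigns within one direction, preferring the canonical terminal;
	// "<>" marks headsign values that appear in both directions of a route.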
@Override
public boolean mergeHeadsign(MTrip mTrip, MTrip mTripToMerge) {
List<String> headsignsValues = Arrays.asList(mTrip.getHeadsignValue(), mTripToMerge.getHeadsignValue());
if (mTrip.getRouteId() == 2L) {
if (Arrays.asList( //
JAMES_BAY, // <>
DOWNTOWN, // <>
WILLOWS, //
SOUTH_OAK_BAY //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(SOUTH_OAK_BAY, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 3L) {
if (Arrays.asList( //
DOWNTOWN, // <>
R_JUBILEE, //
JAMES_BAY // ++
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(JAMES_BAY, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 4L) {
if (Arrays.asList( //
GORGE + AND + DOUGLAS, //
DOWNTOWN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(DOWNTOWN, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 6L) {
if (Arrays.asList( //
"A " + DOWNTOWN, //
"B " + DOWNTOWN, //
DOWNTOWN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(DOWNTOWN, mTrip.getHeadsignId());
return true;
} else if (Arrays.asList( //
"A " + ROYAL_OAK_EXCH, //
"B " + ROYAL_OAK_EXCH, //
ROYAL_OAK_EXCH //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(ROYAL_OAK_EXCH, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 7L) {
if (Arrays.asList( //
"N " + U_VIC, //
U_VIC //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(U_VIC, mTrip.getHeadsignId());
return true;
} else if (Arrays.asList( //
INTERURBAN, //
"N " + DOWNTOWN, //
DOWNTOWN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(DOWNTOWN, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 8L) {
if (Arrays.asList( //
DOUGLAS, //
RICHMOND + AND + OAK_BAY + " Ave", //
OAK_BAY //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(OAK_BAY, mTrip.getHeadsignId());
return true;
} else if (Arrays.asList( //
TILLICUM_MALL, //
INTERURBAN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(INTERURBAN, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 10L) {
if (Arrays.asList( //
VIC_WEST, //
JAMES_BAY //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(JAMES_BAY, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 11L) {
if (Arrays.asList( //
DOWNTOWN, //
U_VIC //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(U_VIC, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 14L) {
if (Arrays.asList( //
DOWNTOWN, // <>
U_VIC //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(U_VIC, mTrip.getHeadsignId());
return true;
} else if (Arrays.asList( //
DOWNTOWN, // <>
VIC_GENERAL //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(VIC_GENERAL, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 15L) {
if (Arrays.asList( //
DOWNTOWN, //
U_VIC //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(U_VIC, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 21L) {
if (Arrays.asList( //
"N " + CAMOSUN, //
INTERURBAN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(INTERURBAN, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 22L) {
if (Arrays.asList( //
DOWNTOWN, // <>
"A " + VIC_GENERAL, //
SPECTRUM_SCHOOL, //
VIC_GENERAL //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(VIC_GENERAL, mTrip.getHeadsignId());
return true;
} else if (Arrays.asList( //
DOWNTOWN, // <>
"A " + HILLSIDE_MALL, //
HILLSIDE_MALL //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(HILLSIDE_MALL, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 24L) {
if (Arrays.asList( //
DOWNTOWN, //
ADMIRALS_WALK //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(ADMIRALS_WALK, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 25L) {
if (Arrays.asList( //
SHORELINE_SCHOOL, //
ADMIRALS_WALK //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(ADMIRALS_WALK, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 26L) {
if (Arrays.asList( //
UPTOWN, // <>
U_VIC //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(U_VIC, mTrip.getHeadsignId());
return true;
} else if (Arrays.asList( //
UPTOWN, // <>
DOCKYARD //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(DOCKYARD, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 27L) {
if (Arrays.asList( //
HILLSIDE, //
DOWNTOWN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(DOWNTOWN, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 28L) {
if (Arrays.asList( //
MC_KENZIE, //
HILLSIDE, //
DOWNTOWN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(DOWNTOWN, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 30L) {
if (Arrays.asList( //
SAANICHTON, //
ROYAL_OAK_EXCH //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(ROYAL_OAK_EXCH, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 31L) {
if (Arrays.asList( //
SAANICHTON, //
ROYAL_OAK_EXCH //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(ROYAL_OAK_EXCH, mTrip.getHeadsignId());
return true;
} else if (Arrays.asList( //
GORGE, //
UPTOWN, //
DOWNTOWN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(DOWNTOWN, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 32L) {
if (Arrays.asList( //
DOWNTOWN, //
ROYAL_OAK_EXCH //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(ROYAL_OAK_EXCH, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 39L) {
if (Arrays.asList( //
ROYAL_OAK_EXCH, //
INTERURBAN, //
WESTHILLS_EXCH //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(WESTHILLS_EXCH, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 50L) {
if (Arrays.asList( //
LANGFORD, //
SOOKE //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(LANGFORD, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 52L) {
if (Arrays.asList( //
LANGFORD_EXCH, // <>
COLWOOD_EXCH //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(COLWOOD_EXCH, mTrip.getHeadsignId());
return true;
} else if (Arrays.asList( //
LANGFORD_EXCH, // <>
BEAR_MOUTAIN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(BEAR_MOUTAIN, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 55L) {
if (Arrays.asList( //
COLWOOD_EXCH, //
HAPPY_VLY //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(HAPPY_VLY, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 61L) {
if (Arrays.asList( //
LANGFORD, //
DOWNTOWN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(DOWNTOWN, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 64L) {
if (Arrays.asList( //
MILE_HOUSE, //
LANGFORD, //
SOOKE //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(SOOKE, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 70L) {
if (Arrays.asList( //
GORGE, //
DOWNTOWN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(DOWNTOWN, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 72L) {
if (Arrays.asList( //
MC_DONALD_PARK, //
SWARTZ_BAY_FERRY //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(SWARTZ_BAY_FERRY, mTrip.getHeadsignId());
return true;
} else if (Arrays.asList( //
MC_TAVISH_EXCH, //
DOWNTOWN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(DOWNTOWN, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 75L) {
if (Arrays.asList( //
KEATING, //
SAANICHTON_EXCH //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(SAANICHTON_EXCH, mTrip.getHeadsignId());
return true;
} else if (Arrays.asList( //
ROYAL_OAK_EXCH, //
DOWNTOWN //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(DOWNTOWN, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 81L) {
if (Arrays.asList( //
SAANICHTON_EXCH, //
VERDIER, //
BRENTWOOD //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(BRENTWOOD, mTrip.getHeadsignId());
return true;
} else if (Arrays.asList( //
SIDNEY, //
SWARTZ_BAY_FERRY //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(SWARTZ_BAY_FERRY, mTrip.getHeadsignId());
return true;
}
} else if (mTrip.getRouteId() == 82L) {
if (Arrays.asList( //
SAANICHTON_EXCH, //
BRENTWOOD //
).containsAll(headsignsValues)) {
mTrip.setHeadsignString(BRENTWOOD, mTrip.getHeadsignId());
return true;
}
}
System.out.printf("\nUnexpected trips to merges %s & %s!\n", mTrip, mTripToMerge);
System.exit(-1);
return false;
}
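	// Headsign cleanup patterns; in this version STARTS_WITH_NUMBER also strips leading
	// route numbers, so the merge rules above match bare names like "Saanichton".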
private static final Pattern EXCHANGE = Pattern.compile("((^|\\W){1}(exchange)(\\W|$){1})", Pattern.CASE_INSENSITIVE);
private static final String EXCHANGE_REPLACEMENT = "$2" + EXCH + "$4";
private static final Pattern HEIGHTS = Pattern.compile("((^|\\W){1}(Hghts)(\\W|$){1})", Pattern.CASE_INSENSITIVE);
private static final String HEIGHTS_REPLACEMENT = "$2Hts$4";
private static final Pattern STARTS_WITH_NUMBER = Pattern.compile("(^[\\d]+)", Pattern.CASE_INSENSITIVE);
private static final Pattern ENDS_WITH_EXPRESS = Pattern.compile("( express.*$)", Pattern.CASE_INSENSITIVE);
private static final Pattern ENDS_WITH_VIA = Pattern.compile("( via .*$)", Pattern.CASE_INSENSITIVE);
private static final Pattern STARTS_WITH_TO = Pattern.compile("(^.* to )", Pattern.CASE_INSENSITIVE);
private static final Pattern STARTS_WITH_TO_ = Pattern.compile("(^to )", Pattern.CASE_INSENSITIVE);
private static final Pattern ENDS_WITH_DASH = Pattern.compile("( \\- .*$)", Pattern.CASE_INSENSITIVE);
private static final Pattern ENDS_WITH_NON_STOP = Pattern.compile("( non\\-stop$)", Pattern.CASE_INSENSITIVE);
private static final Pattern ENDS_WITH_ONLY = Pattern.compile("( only$)", Pattern.CASE_INSENSITIVE);
@Override
public String cleanTripHeadsign(String tripHeadsign) {
if (Utils.isUppercaseOnly(tripHeadsign, true, true)) {
tripHeadsign = tripHeadsign.toLowerCase(Locale.ENGLISH);
}
tripHeadsign = EXCHANGE.matcher(tripHeadsign).replaceAll(EXCHANGE_REPLACEMENT);
tripHeadsign = HEIGHTS.matcher(tripHeadsign).replaceAll(HEIGHTS_REPLACEMENT);
tripHeadsign = ENDS_WITH_DASH.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
tripHeadsign = ENDS_WITH_VIA.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
tripHeadsign = STARTS_WITH_TO.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
tripHeadsign = ENDS_WITH_EXPRESS.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
tripHeadsign = STARTS_WITH_NUMBER.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
tripHeadsign = STARTS_WITH_TO_.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
tripHeadsign = ENDS_WITH_NON_STOP.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
tripHeadsign = ENDS_WITH_ONLY.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
tripHeadsign = CleanUtils.cleanSlashes(tripHeadsign);
tripHeadsign = CleanUtils.cleanStreetTypes(tripHeadsign);
tripHeadsign = CleanUtils.cleanNumbers(tripHeadsign);
return CleanUtils.cleanLabel(tripHeadsign);
}
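// Stop-name cleanup patterns: strip "(-IMPL-)" markers and leading "(east|west|north|south)bound" prefixes; rewrite "uvic" using the U_VIC constant.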
private static final Pattern STARTS_WITH_BOUND = Pattern.compile("(^(east|west|north|south)bound)", Pattern.CASE_INSENSITIVE);
private static final Pattern UVIC = Pattern.compile("((^|\\W){1}(uvic)(\\W|$){1})", Pattern.CASE_INSENSITIVE);
private static final String UVIC_REPLACEMENT = "$2" + U_VIC + "$4";
private static final Pattern STARTS_WITH_IMPL = Pattern.compile("(^(\\(\\-IMPL\\-\\)))", Pattern.CASE_INSENSITIVE);
@Override
public String cleanStopName(String gStopName) {
gStopName = STARTS_WITH_IMPL.matcher(gStopName).replaceAll(StringUtils.EMPTY);
gStopName = STARTS_WITH_BOUND.matcher(gStopName).replaceAll(StringUtils.EMPTY);
gStopName = CleanUtils.CLEAN_AT.matcher(gStopName).replaceAll(CleanUtils.CLEAN_AT_REPLACEMENT);
gStopName = CleanUtils.CLEAN_AND.matcher(gStopName).replaceAll(CleanUtils.CLEAN_AND_REPLACEMENT);
gStopName = EXCHANGE.matcher(gStopName).replaceAll(EXCHANGE_REPLACEMENT);
gStopName = UVIC.matcher(gStopName).replaceAll(UVIC_REPLACEMENT);
gStopName = CleanUtils.cleanStreetTypes(gStopName);
gStopName = CleanUtils.cleanNumbers(gStopName);
return CleanUtils.cleanLabel(gStopName);
}
@Override
public int getStopId(GStop gStop) {
return Integer.parseInt(gStop.getStopCode()); // use stop code as stop ID
}
}
| Compatibility with latest update
| src/org/mtransit/parser/ca_victoria_regional_transit_system_bus/VictoriaRegionalTransitSystemBusAgencyTools.java | Compatibility with latest update | <ide><path>rc/org/mtransit/parser/ca_victoria_regional_transit_system_bus/VictoriaRegionalTransitSystemBusAgencyTools.java
<ide>
<ide> @Override
<ide> public long getRouteId(GRoute gRoute) {
<add> if ("ARB".equalsIgnoreCase(gRoute.getRouteShortName())) {
<add> return 999L;
<add> }
<ide> return Long.parseLong(gRoute.getRouteShortName()); // use route short name as route ID
<ide> }
<ide>
<ide> if (ALL_ROUTE_TRIPS2.containsKey(mRoute.getId())) {
<ide> return; // split
<ide> }
<add> String tripHeadsign = gTrip.getTripHeadsign();
<add> tripHeadsign = Pattern.compile("(^" + mRoute.getShortName() + "( )?)", Pattern.CASE_INSENSITIVE).matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
<ide> if (mRoute.getId() == 1L) {
<ide> if (gTrip.getDirectionId() == 0) { // DOWNTOWN - WEST
<ide> if (Arrays.asList( //
<ide> "Downtown" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // SOUTH OAK BAY - EAST
<ide> if (Arrays.asList( //
<ide> "South Oak Bay via Richardson" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // JAMES BAY - WEST
<ide> if (Arrays.asList( //
<ide> "James Bay - Fisherman's Wharf" // <>
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // OAK BAY - EAST
<ide> "Downtown Only", //
<ide> "South Oak Bay - Oak Bay Village", //
<ide> "Willows - Oak Bay Village" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
<ide> return;
<ide> }
<ide> }
<ide> "James Bay To 10 R. Jubilee", //
<ide> "James Bay - Linden to 10 R. Jubilee", //
<ide> "James Bay - Quimper To 10 R. Jubilee" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.CLOCKWISE);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.CLOCKWISE);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // ROYAL JUBILEE - COUNTERCLOCKWISE
<ide> if (Arrays.asList( //
<ide> "Royal Jubilee - Cook St Village", //
<ide> "Royal Jubilee - Cook St Vlg/Quimper" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.COUNTERCLOCKWISE);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.COUNTERCLOCKWISE);
<ide> return;
<ide> }
<ide> }
<ide> if (Arrays.asList( //
<ide> "Downtown", //
<ide> "To Gorge & Douglas" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
<ide> if (Arrays.asList( //
<ide> "UVic Via Hillside" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // ROYAL OAK - NORTH
<ide> if (Arrays.asList( //
<ide> "Royal Oak Exch Via Royal Oak Mall", //
<del> "6A Royal Oak Exch Via Emily Carr", //
<del> "6B Royal Oak Exch Via Chatterton" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
<add> "A Royal Oak Exch Via Emily Carr", //
<add> "B Royal Oak Exch Via Chatterton" //
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - SOUTH
<ide> if (Arrays.asList( //
<ide> "Downtown", //
<del> "6B Downtown Via Chatterton", //
<del> "6A Downtown Via Emily Carr" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
<add> "B Downtown Via Chatterton", //
<add> "A Downtown Via Emily Carr" //
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // DOWNTOWN - CLOCKWISE
<ide> if (Arrays.asList( //
<ide> "Downtown Only", //
<del> "7N Downtown Only", //
<add> "N Downtown Only", //
<ide> "Downtown - To 21 Interurban", //
<del> "7N Downtown - To 21 Interurban", //
<add> "N Downtown - To 21 Interurban", //
<ide> "Downtown To 21 Interurban" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.CLOCKWISE);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.CLOCKWISE);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // UVIC - COUNTERCLOCKWISE
<ide> if (Arrays.asList( //
<ide> "UVic Via Fairfield", //
<del> "7N UVic - Cook St Village" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.COUNTERCLOCKWISE);
<add> "N UVic - Cook St Village" //
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.COUNTERCLOCKWISE);
<ide> return;
<ide> }
<ide> }
<ide> if (Arrays.asList( //
<ide> "Tillicum Mall Via Finalyson", //
<ide> "Interurban Via Finlayson" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // OAK BAY - EAST
<ide> "To Douglas Only - Mayfair Mall", //
<ide> "Oak Bay Via Finlayson", //
<ide> "Oak Bay Via Finalyson" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // ROYAL OAK - WEST
<ide> if (Arrays.asList( //
<ide> "Royal Oak Exch - Hillside/Gorge" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
<ide> if (Arrays.asList( //
<ide> "UVic - Gorge/Hillside" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // ROYAL JUBILEE - CLOCKWISE
<ide> if (Arrays.asList( //
<ide> "Royal Jubilee Via Vic West" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.CLOCKWISE);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.CLOCKWISE);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // JAMES BAY - COUNTERCLOCKWISE
<ide> if (Arrays.asList( //
<ide> "James Bay - To 3 R. Jubilee", //
<ide> "To Vic West Only" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.COUNTERCLOCKWISE);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.COUNTERCLOCKWISE);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // TILLICUM MALL - WEST
<ide> if (Arrays.asList( //
<ide> "Tillicum Mall Via Gorge" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
<ide> "Downtown", //
<ide> "Downtown Only", //
<ide> "UVic Via Uplands" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // UNIVERSITY HGTS - WEST
<ide> if (Arrays.asList( //
<ide> "University Hgts Via Kenmore" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
<ide> if (Arrays.asList( //
<ide> "UVic Via Kenmore" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // UVIC - WEST
<ide> if (Arrays.asList( //
<ide> "UVic" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // TEN MILE POINT - EAST
<ide> if (Arrays.asList( //
<ide> "Ten Mile Point" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
<ide> return;
<ide> }
<ide> }
<ide> if (Arrays.asList( //
<ide> "Downtown", //
<ide> "Vic General Via Craigflower" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
<ide> "Downtown", //
<ide> "UVic", //
<ide> "UVic Via Richmond" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
<ide> return;
<ide> }
<ide> }
<ide> if (Arrays.asList( //
<ide> "Esquimalt", //
<ide> "Esquimalt - Fort/Yates Exp" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
<ide> if (Arrays.asList( //
<ide> "Downtown", //
<ide> "UVic - Foul Bay Exp" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // UPTOWN - WEST
<ide> if (Arrays.asList( //
<ide> "Uptown - McKenzie Exp" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
<ide> if (Arrays.asList( //
<ide> "UVic - McKenzie Exp" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // Downtown - WEST
<ide> if (Arrays.asList( //
<ide> "Downtown Via Quadra" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
<ide> if (Arrays.asList( //
<ide> "UVic Via Cedar Hill Sch" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
<ide> return;
<ide> }
<ide> }
<ide> "Interurban - VI Tech Park", //
<ide> "Interurban - Camosun Only", //
<ide> "Interurban - Viaduct Loop", //
<del> "21N Camosun Via Burnside" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.CLOCKWISE);
<add> "N Camosun Via Burnside" //
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.CLOCKWISE);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - COUNTERCLOCKWISE
<ide> if (Arrays.asList( //
<ide> "Downtown To 7 UVic" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.COUNTERCLOCKWISE);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.COUNTERCLOCKWISE);
<ide> return;
<ide> }
<ide> }
<ide> "Downtown Only", //
<ide> "Downtown", //
<ide> "Vic General - Watkiss Way Via Burnside", //
<del> "22A Vic General - Watkiss Wy Via S. Vale", //
<del> "22A Vic General Via Straw Vale", //
<del> "22A Vic General Via S. Vale", //
<add> "A Vic General - Watkiss Wy Via S. Vale", //
<add> "A Vic General Via Straw Vale", //
<add> "A Vic General Via S. Vale", //
<ide> "To Spectrum School", //
<ide> "Vic General Via Burnside" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // NILLSIDE MALL - SOUTH
<ide> if (Arrays.asList( //
<ide> "Downtown Only", //
<ide> "Downtown", //
<del> "22A Hillside Mall Via Straw Vale", //
<add> "A Hillside Mall Via Straw Vale", //
<ide> "Hillside Mall Via Fernwood" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
<ide> return;
<ide> }
<ide> }
<ide> "Downtown", //
<ide> "Admirals Walk Via Parklands/Colville", //
<ide> "Admirals Walk Via Colville" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // Cedar Hill - EAST
<ide> if (Arrays.asList( //
<ide> "Cedar Hill", //
<ide> "Cedar Hill Via Parklands" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
<ide> return;
<ide> }
<ide> }
<ide> if (Arrays.asList( //
<ide> "Shoreline Sch Via Munro", //
<ide> "Admirals Walk Via Munro" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // Maplewood - EAST
<ide> if (Arrays.asList( //
<ide> "Maplewood" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
<ide> return;
<ide> }
<ide> }
<ide> if (Arrays.asList( //
<ide> "To Uptown Only", //
<ide> "Dockyard Via McKenzie" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
<ide> if (Arrays.asList( //
<ide> "To Uptown Only", //
<ide> "UVic Via McKenzie" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // GORDON HEAD - NORTH
<ide> if (Arrays.asList( //
<ide> "Gordon Head Via Shelbourne" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - SOUTH
<ide> if (Arrays.asList( //
<del> "27X Express To Downtown", //
<add> "X Express To Downtown", //
<ide> "To Hillside Only", //
<ide> "Downtown" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
<ide> return;
<ide> }
<ide> }
<ide> } else if (mTrip.getRouteId() == 28L) {
<ide> if (gTrip.getDirectionId() == 0) { // MAJESTIC - NORTH
<ide> if (Arrays.asList( //
<del> "28X Express To Majestic", //
<add> "X Express To Majestic", //
<ide> "Majestic Via Shelbourne" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - SOUTH
<ide> "To McKenzie Only", //
<ide> "To Hillside Only", //
<ide> "Downtown" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
<ide> return;
<ide> }
<ide> }
<ide> if (Arrays.asList( //
<ide> "Royal Oak Exch Via Carey", //
<ide> "Royal Oak Exch To 75 Saanichton" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
<ide> return;
<ide> }
<ide> if (Arrays.asList( //
<ide> "Downtown" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
<ide> return;
<ide> }
<ide> } else if (mTrip.getRouteId() == 31L) {
<ide> if (Arrays.asList( //
<ide> "Royal Oak Exch To 75 Saanichton", //
<ide> "Royal Oak Exch Via Glanford" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - SOUTH
<ide> "To Gorge Only", //
<ide> "To Uptown Only", //
<ide> "Downtown" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // Cordova Bay - NORTH
<ide> if (Arrays.asList( //
<ide> "Cordova Bay" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // ROYAL OAK - SOUTH
<ide> if (Arrays.asList( //
<ide> "Downtown", //
<ide> "Royal Oak Exch" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // Ridge - CLOCKWISE
<ide> if (Arrays.asList( //
<ide> "Ridge" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.CLOCKWISE);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.CLOCKWISE);
<ide> return;
<ide> }
<ide> }
<ide> "Royal Oak Exch", //
<ide> "Interurban", //
<ide> "Westhills Exch" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
<ide> if (Arrays.asList( //
<ide> "UVic Via Royal Oak" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // ROYAL ROADS - CLOCKWISE
<ide> if (Arrays.asList( //
<ide> "Belmont Park - Royal Roads" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.CLOCKWISE);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.CLOCKWISE);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // WESTHILLS - WEST
<ide> if (Arrays.asList( //
<ide> "Westhills Exch" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // DOCKYARD - EAST
<ide> if (Arrays.asList( //
<ide> "Dockyard" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // GOLDSTREAM MEADOWS - WEST
<ide> if (Arrays.asList( //
<ide> "Goldstream Mdws Via Thetis Hgts" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - EAST
<ide> if (Arrays.asList( //
<ide> "Downtown" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
<ide> return;
<ide> }
<ide> }
<ide> if (Arrays.asList( //
<ide> "Happy Valley via Colwood", //
<ide> "HAPPY VALLEY VIA COLWOOD" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - EAST
<ide> if (Arrays.asList( //
<ide> "Downtown" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
<ide> return;
<ide> }
<ide> }
<ide> if (Arrays.asList( //
<ide> "Langford To 61 Sooke", //
<ide> "Langford" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - EAST
<ide> if (Arrays.asList( //
<ide> "Downtown" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // LANGFORD - WEST
<ide> if (Arrays.asList( //
<ide> "Langford - McKenzie Exp" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // UVIC - EAST
<ide> if (Arrays.asList( //
<ide> "UVic - McKenzie Exp" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
<ide> return;
<ide> }
<ide> }
<ide> "Bear Mountain - Lagoon/Royal Bay", //
<ide> "Bear Mountain Via Lagoon", //
<ide> "Bear Mountain" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // COLWOOD EXCHANGE - EAST
<ide> "Colwood Exch Via Royal Bay", //
<ide> "Colwood Exch Via Lagoon", //
<ide> "Colwood Exch" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
<ide> return;
<ide> }
<ide> }
<ide> if (Arrays.asList( //
<ide> "Colwood Exch Via Atkins - Thetis Lk", //
<ide> "Colwood Exch Via Atkins" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.CLOCKWISE);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.CLOCKWISE);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // LANGFORD EXCHANGE - COUNTERCLOCKWISE
<ide> if (Arrays.asList( //
<ide> "Langford Exch Via Atkins - Theits Lk", //
<ide> "Langford Exch Via Atkins" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.COUNTERCLOCKWISE);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.COUNTERCLOCKWISE);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // LANGFORD EXCHANGE - CLOCKWISE
<ide> if (Arrays.asList( //
<ide> "Metchosin" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.CLOCKWISE);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.CLOCKWISE);
<ide> return;
<ide> }
<ide> }
<ide> if (Arrays.asList( //
<ide> "Happy Valley To Colwood Exch", //
<ide> "Happy Valley" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.COUNTERCLOCKWISE);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.COUNTERCLOCKWISE);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // THETIS HEIGHTS - NORTH
<ide> if (Arrays.asList( //
<ide> "Thetis Heights Via Florence Lake" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // LANGFORD EXCHANGE - SOUTH
<ide> if (Arrays.asList( //
<ide> "Langford Exch" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
<ide> return;
<ide> }
<ide> }
<ide> } else if (mTrip.getRouteId() == 57L) {
<ide> if (gTrip.getDirectionId() == 0) { // THETIS HEIGHTS - NORTH
<ide> if (Arrays.asList( //
<add> "Thetis Heights Via Millstream", //
<ide> "Theits Heights Via Millstream" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // LANGFORD EXCHANGE - SOUTH
<ide> if (Arrays.asList( //
<ide> "Langford Exch" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 1) { // GOLDSTREAM MEADOWS - OUTBOUND
<ide> if (Arrays.asList( //
<ide> "Goldstream Mdws" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.OUTBOUND);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.OUTBOUND);
<ide> return;
<ide> }
<ide> }
<ide> if (Arrays.asList( //
<ide> "Triangle Mtn Via Royal Bay", //
<ide> "Triangle Mtn" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.COUNTERCLOCKWISE);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.COUNTERCLOCKWISE);
<ide> return;
<ide> }
<ide> }
<ide> if (Arrays.asList( //
<ide> "Wishart Via Royal Bay", //
<ide> "Wishart" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.CLOCKWISE);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.CLOCKWISE);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // SOOKE - WEST
<ide> if (Arrays.asList( //
<ide> "Sooke" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - EAST
<ide> "Langford - Jacklin/Station", //
<ide> "Langford Exch To 50 Downtown", //
<ide> "Downtown" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // OTTER POINT - WEST
<ide> if (Arrays.asList( //
<ide> "Otter Point" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> }
<ide> "East Sooke To 17 Mile House", //
<ide> "East Sooke To Langford", //
<ide> "East Sooke To Sooke" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.CLOCKWISE);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.CLOCKWISE);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 1) { // SOOKE - ????
<ide> if (Arrays.asList( //
<ide> "East Sooke" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.COUNTERCLOCKWISE);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.COUNTERCLOCKWISE);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // SOOKE - WEST
<ide> if (Arrays.asList( //
<ide> "Sooke Via Westhills" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.WEST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.WEST);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - EAST
<ide> if (Arrays.asList( //
<ide> "Downtown Via Westhills" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.EAST);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.EAST);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // SWARTZ BAY FERRY - NORTH
<ide> if (Arrays.asList( //
<ide> "Swartz Bay Ferry Via Hwy #17" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - SOUTH
<ide> if (Arrays.asList( //
<ide> "To Gorge Only Via Hwy #17", //
<ide> "Downtown Via Hwy #17" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // SWARTZ BAY FERRY - NORTH
<ide> if (Arrays.asList( //
<ide> "Swartz Bay Ferry Via West Sidney" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - SOUTH
<ide> if (Arrays.asList( //
<ide> "Downtown" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
<ide> return;
<ide> }
<ide> }
<ide> if (Arrays.asList( //
<ide> "McDonald Park Via Saanichton", //
<ide> "Swartz Bay Ferry Via Saanichton" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - SOUTH
<ide> if (Arrays.asList( //
<ide> "McTavish Exch", //
<ide> "Downtown" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
<ide> return;
<ide> }
<ide> }
<ide> "To Keating Only", //
<ide> "Saanichton Exch Via Verdier", //
<ide> "Saanichton Exch" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // DOWNTOWN - SOUTH
<ide> "Royal Oak Exch To 30 Downtown", //
<ide> "Royal Oak Exch To 31 Downtown", //
<ide> "Downtown" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // SWARTZ BAY FERRY - NORTH
<ide> if (Arrays.asList( //
<ide> "Swartz Bay Ferry Non-Stop" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // UVIC - SOUTH
<ide> if (Arrays.asList( //
<ide> "UVic - Via Express" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
<ide> return;
<ide> }
<ide> }
<ide> if (Arrays.asList( //
<ide> "To Sidney Only", //
<ide> "Swartz Bay Ferry" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // BRENTWOOD - SOUTH
<ide> "Saanichton Exch", //
<ide> "Brentwood To Verdier Only", //
<ide> "Brentwood" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // SIDNEY - NORTH
<ide> if (Arrays.asList( //
<ide> "Sidney Via Stautw" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // SAANICHTON - SOUTH
<ide> if (Arrays.asList( //
<ide> "To Brentwood Via Stautw", //
<ide> "Saanichton Exch Via Stautw" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // SIDNEY - NORTH
<ide> if (Arrays.asList( //
<ide> "Sidney Via West Saanich" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // ROYAL OAK - SOUTH
<ide> if (Arrays.asList( //
<ide> "Royal Oak Exch Via West Saanich" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // NORTH SAANICH - CLOCKWISE
<ide> if (Arrays.asList( //
<ide> "North Saanich" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.CLOCKWISE);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.CLOCKWISE);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // NORTH SAANICH - CLOCKWISE
<ide> if (Arrays.asList( //
<ide> "North Saanich" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.CLOCKWISE);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.CLOCKWISE);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // SIDNEY - NORTH
<ide> if (Arrays.asList( //
<ide> "Sidney" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // SAANICHTON - SOUTH
<ide> if (Arrays.asList( //
<ide> "Dean Park Via Airport To Saanichton" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
<ide> return;
<ide> }
<ide> }
<ide> if (gTrip.getDirectionId() == 0) { // SIDNEY - NORTH
<ide> if (Arrays.asList( //
<ide> "Sidney" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.NORTH);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.NORTH);
<ide> return;
<ide> }
<ide> } else if (gTrip.getDirectionId() == 1) { // AIRPORT - SOUTH
<ide> if (Arrays.asList( //
<ide> "Airport" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.SOUTH);
<del> return;
<del> }
<del> }
<del> } else if (mTrip.getRouteId() == 11_802L) { // ARB
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.SOUTH);
<add> return;
<add> }
<add> }
<add> } else if (mTrip.getRouteId() == 999L) { // ARB
<ide> if (isGoodEnoughAccepted()) { // TODO check
<ide> if (gTrip.getDirectionId() == 1) { // ??? - ????
<ide> if (Arrays.asList( //
<ide> "Shuttle Bus" //
<del> ).contains(gTrip.getTripHeadsign())) {
<del> mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), StrategicMappingCommons.COUNTERCLOCKWISE);
<add> ).contains(tripHeadsign)) {
<add> mTrip.setHeadsignString(cleanTripHeadsign(tripHeadsign), StrategicMappingCommons.COUNTERCLOCKWISE);
<ide> return;
<ide> }
<ide> }
<ide> } else if (mTrip.getRouteId() == 3L) {
<ide> if (Arrays.asList( //
<ide> DOWNTOWN, // <>
<del> R_JUBILEE, //
<add> "10 " + R_JUBILEE, //
<ide> JAMES_BAY // ++
<ide> ).containsAll(headsignsValues)) {
<ide> mTrip.setHeadsignString(JAMES_BAY, mTrip.getHeadsignId());
<ide> }
<ide> } else if (mTrip.getRouteId() == 30L) {
<ide> if (Arrays.asList( //
<del> SAANICHTON, //
<add> "75 " + SAANICHTON, //
<ide> ROYAL_OAK_EXCH //
<ide> ).containsAll(headsignsValues)) {
<ide> mTrip.setHeadsignString(ROYAL_OAK_EXCH, mTrip.getHeadsignId());
<ide> }
<ide> } else if (mTrip.getRouteId() == 31L) {
<ide> if (Arrays.asList( //
<del> SAANICHTON, //
<add> "75 " + SAANICHTON, //
<ide> ROYAL_OAK_EXCH //
<ide> ).containsAll(headsignsValues)) {
<ide> mTrip.setHeadsignString(ROYAL_OAK_EXCH, mTrip.getHeadsignId());
<ide> }
<ide> } else if (mTrip.getRouteId() == 50L) {
<ide> if (Arrays.asList( //
<del> LANGFORD, //
<del> SOOKE //
<add> "61 " + SOOKE, //
<add> LANGFORD //
<ide> ).containsAll(headsignsValues)) {
<ide> mTrip.setHeadsignString(LANGFORD, mTrip.getHeadsignId());
<ide> return true;
<ide> } else if (mTrip.getRouteId() == 61L) {
<ide> if (Arrays.asList( //
<ide> LANGFORD, //
<add> "50 " + DOWNTOWN, //
<ide> DOWNTOWN //
<ide> ).containsAll(headsignsValues)) {
<ide> mTrip.setHeadsignString(DOWNTOWN, mTrip.getHeadsignId());
<ide> }
<ide> } else if (mTrip.getRouteId() == 64L) {
<ide> if (Arrays.asList( //
<del> MILE_HOUSE, //
<add> "17 " + MILE_HOUSE, //
<ide> LANGFORD, //
<ide> SOOKE //
<ide> ).containsAll(headsignsValues)) {
<ide> return true;
<ide> } else if (Arrays.asList( //
<ide> ROYAL_OAK_EXCH, //
<add> "30 " + DOWNTOWN, //
<add> "31 " + DOWNTOWN, //
<ide> DOWNTOWN //
<ide> ).containsAll(headsignsValues)) {
<ide> mTrip.setHeadsignString(DOWNTOWN, mTrip.getHeadsignId());
<ide>
<ide> private static final Pattern HEIGHTS = Pattern.compile("((^|\\W){1}(Hghts)(\\W|$){1})", Pattern.CASE_INSENSITIVE);
<ide> private static final String HEIGHTS_REPLACEMENT = "$2Hts$4";
<del>
<del> private static final Pattern STARTS_WITH_NUMBER = Pattern.compile("(^[\\d]+)", Pattern.CASE_INSENSITIVE);
<ide>
<ide> private static final Pattern ENDS_WITH_EXPRESS = Pattern.compile("( express.*$)", Pattern.CASE_INSENSITIVE);
<ide>
<ide> tripHeadsign = ENDS_WITH_VIA.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
<ide> tripHeadsign = STARTS_WITH_TO.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
<ide> tripHeadsign = ENDS_WITH_EXPRESS.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
<del> tripHeadsign = STARTS_WITH_NUMBER.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
<ide> tripHeadsign = STARTS_WITH_TO_.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
<ide> tripHeadsign = ENDS_WITH_NON_STOP.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
<ide> tripHeadsign = ENDS_WITH_ONLY.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY); |
|
JavaScript | mit | f9d071838b534631e94bff18b3b507f8002d02ba | 0 | onebytegone/banknote-client,onebytegone/banknote-client | var $ = require('jquery');
(function() {
var filterSymbol = function(value) {
return value.replace(/[^\d.]/g, '');
};
$(document).on('focus', '.jsCurrency', function() {
var elem = $(this),
val = filterSymbol(elem.val());
// If zero value, clear input for user
if (parseFloat(val) === 0) {
val = '';
}
elem.val(val);
});
$(document).on('blur', '.jsCurrency', function() {
var elem = $(this),
val = elem.val();
// Trim to float, e.g. 2112.123, then format to
// 2 decimal places, e.g. 212.12, rounding as needed.
val = parseFloat(val).toFixed(2);
elem.val('$' + val);
});
})();
| src/common/library/CurrencyInputStyler.js | var $ = require('jquery');
(function() {
var filterSymbol = function(value) {
return value.replace(/[^\d.]/g, '');
};
$(document).on('focus', '.jsCurrency', function() {
var elem = $(this);
elem.val(filterSymbol(elem.val()));
});
$(document).on('blur', '.jsCurrency', function() {
var elem = $(this),
val = elem.val();
// Trim to float, e.g. 2112.123, then format to
// 2 decimal places, e.g. 212.12, rounding as needed.
val = parseFloat(val).toFixed(2);
elem.val('$' + val);
});
})();
| Clear field on focus when no value
| src/common/library/CurrencyInputStyler.js | Clear field on focus when no value | <ide><path>rc/common/library/CurrencyInputStyler.js
<ide> };
<ide>
<ide> $(document).on('focus', '.jsCurrency', function() {
<del> var elem = $(this);
<del> elem.val(filterSymbol(elem.val()));
<add> var elem = $(this),
<add> val = filterSymbol(elem.val());
<add>
<add> // If zero value, clear input for user
<add> if (parseFloat(val) === 0) {
<add> val = '';
<add> }
<add>
<add> elem.val(val);
<ide> });
<ide>
<ide> $(document).on('blur', '.jsCurrency', function() { |
|
Java | agpl-3.0 | f8079199c313942cf83efe134c16aca629a9336a | 0 | cryptomator/cryptofs | /*******************************************************************************
* Copyright (c) 2016 Sebastian Stenzel and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the accompanying LICENSE.txt.
*
* Contributors:
* Sebastian Stenzel - initial API and implementation
*******************************************************************************/
package org.cryptomator.cryptofs;
import com.google.common.base.Throwables;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.io.BaseEncoding;
import org.cryptomator.cryptolib.common.MessageDigestSupplier;
import javax.inject.Inject;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.ByteBuffer;
import java.nio.channels.SeekableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.time.Duration;
import java.util.Arrays;
import java.util.concurrent.ExecutionException;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.cryptomator.cryptofs.common.Constants.DEFLATED_FILE_SUFFIX;
import static org.cryptomator.cryptofs.common.Constants.INFLATED_FILE_NAME;
@CryptoFileSystemScoped
public class LongFileNameProvider {
public static final int MAX_FILENAME_BUFFER_SIZE = 10 * 1024; // no sane person gives a file a 10kb long name.
private static final BaseEncoding BASE64 = BaseEncoding.base64Url();
private static final Duration MAX_CACHE_AGE = Duration.ofMinutes(1);
private final ReadonlyFlag readonlyFlag;
private final LoadingCache<Path, String> longNames; // Maps from c9s paths to inflated filenames
@Inject
public LongFileNameProvider(ReadonlyFlag readonlyFlag) {
this.readonlyFlag = readonlyFlag;
this.longNames = CacheBuilder.newBuilder().expireAfterAccess(MAX_CACHE_AGE).build(new Loader());
}
private class Loader extends CacheLoader<Path, String> {
@Override
public String load(Path c9sPath) throws IOException {
Path longNameFile = c9sPath.resolve(INFLATED_FILE_NAME);
try (SeekableByteChannel ch = Files.newByteChannel(longNameFile, StandardOpenOption.READ)) {
if (ch.size() > MAX_FILENAME_BUFFER_SIZE) {
throw new IOException("Unexpectedly large file: " + longNameFile);
}
assert ch.size() <= MAX_FILENAME_BUFFER_SIZE;
ByteBuffer buf = ByteBuffer.allocate((int) ch.size());
ch.read(buf);
buf.flip();
return UTF_8.decode(buf).toString();
}
}
}
public boolean isDeflated(String possiblyDeflatedFileName) {
return possiblyDeflatedFileName.endsWith(DEFLATED_FILE_SUFFIX);
}
public String inflate(Path c9sPath) throws IOException {
try {
return longNames.get(c9sPath);
} catch (ExecutionException e) {
Throwables.throwIfInstanceOf(e.getCause(), IOException.class);
throw new IllegalStateException("Unexpected exception", e);
}
}
public DeflatedFileName deflate(Path c9rPath) {
String longFileName = c9rPath.getFileName().toString();
byte[] longFileNameBytes = longFileName.getBytes(UTF_8);
byte[] hash = MessageDigestSupplier.SHA1.get().digest(longFileNameBytes);
String shortName = BASE64.encode(hash) + DEFLATED_FILE_SUFFIX;
Path c9sPath = c9rPath.resolveSibling(shortName);
longNames.put(c9sPath, longFileName);
return new DeflatedFileName(c9sPath, longFileName, readonlyFlag);
}
public static class DeflatedFileName {
public final Path c9sPath;
public final String longName;
private final ReadonlyFlag readonlyFlag;
DeflatedFileName(Path c9sPath, String longName, ReadonlyFlag readonlyFlag) {
this.c9sPath = c9sPath;
this.longName = longName;
this.readonlyFlag = readonlyFlag;
}
public void persist() {
readonlyFlag.assertWritable();
try {
persistInternal();
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
private void persistInternal() throws IOException {
Path longNameFile = c9sPath.resolve(INFLATED_FILE_NAME);
Files.createDirectories(c9sPath);
// write the raw UTF-8 bytes; UTF_8.encode(...).array() can carry padding bytes past the buffer's limit
Files.write(longNameFile, longName.getBytes(UTF_8)); //WRITE, CREATE, TRUNCATE_EXISTING
}
}
}
| src/main/java/org/cryptomator/cryptofs/LongFileNameProvider.java | /*******************************************************************************
* Copyright (c) 2016 Sebastian Stenzel and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the accompanying LICENSE.txt.
*
* Contributors:
* Sebastian Stenzel - initial API and implementation
*******************************************************************************/
package org.cryptomator.cryptofs;
import com.google.common.base.Throwables;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.io.BaseEncoding;
import org.cryptomator.cryptolib.common.MessageDigestSupplier;
import javax.inject.Inject;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.ByteBuffer;
import java.nio.channels.SeekableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.time.Duration;
import java.util.Arrays;
import java.util.concurrent.ExecutionException;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.cryptomator.cryptofs.common.Constants.DEFLATED_FILE_SUFFIX;
import static org.cryptomator.cryptofs.common.Constants.INFLATED_FILE_NAME;
@CryptoFileSystemScoped
public class LongFileNameProvider {
public static final int MAX_FILENAME_BUFFER_SIZE = 10 * 1024; // no sane person gives a file a 10kb long name.
private static final BaseEncoding BASE64 = BaseEncoding.base64Url();
private static final Duration MAX_CACHE_AGE = Duration.ofMinutes(1);
private final ReadonlyFlag readonlyFlag;
private final LoadingCache<Path, String> longNames; // Maps from c9s paths to inflated filenames
@Inject
public LongFileNameProvider(ReadonlyFlag readonlyFlag) {
this.readonlyFlag = readonlyFlag;
this.longNames = CacheBuilder.newBuilder().expireAfterAccess(MAX_CACHE_AGE).build(new Loader());
}
private class Loader extends CacheLoader<Path, String> {
@Override
public String load(Path c9sPath) throws IOException {
Path longNameFile = c9sPath.resolve(INFLATED_FILE_NAME);
try (SeekableByteChannel ch = Files.newByteChannel(longNameFile, StandardOpenOption.READ)) {
if (ch.size() > MAX_FILENAME_BUFFER_SIZE) {
throw new IOException("Unexpectedly large file: " + longNameFile);
}
assert ch.size() <= MAX_FILENAME_BUFFER_SIZE;
ByteBuffer buf = ByteBuffer.allocate((int) ch.size());
ch.read(buf);
buf.flip();
return UTF_8.decode(buf).toString();
}
}
}
public boolean isDeflated(String possiblyDeflatedFileName) {
return possiblyDeflatedFileName.endsWith(DEFLATED_FILE_SUFFIX);
}
public String inflate(Path c9sPath) throws IOException {
try {
return longNames.get(c9sPath);
} catch (ExecutionException e) {
Throwables.throwIfInstanceOf(e.getCause(), IOException.class);
throw new IllegalStateException("Unexpected exception", e);
}
}
public DeflatedFileName deflate(Path c9rPath) {
String longFileName = c9rPath.getFileName().toString();
byte[] longFileNameBytes = longFileName.getBytes(UTF_8);
byte[] hash = MessageDigestSupplier.SHA1.get().digest(longFileNameBytes);
String shortName = BASE64.encode(hash) + DEFLATED_FILE_SUFFIX;
Path c9sPath = c9rPath.resolveSibling(shortName);
longNames.put(c9sPath, longFileName);
return new DeflatedFileName(c9sPath, longFileName, readonlyFlag);
}
public static class DeflatedFileName {
public final Path c9sPath;
public final String longName;
private final ReadonlyFlag readonlyFlag;
DeflatedFileName(Path c9sPath, String longName, ReadonlyFlag readonlyFlag) {
this.c9sPath = c9sPath;
this.longName = longName;
this.readonlyFlag = readonlyFlag;
}
public void persist() {
readonlyFlag.assertWritable();
try {
persistInternal();
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
private void persistInternal() throws IOException {
Path longNameFile = c9sPath.resolve(INFLATED_FILE_NAME);
Files.createDirectories(c9sPath);
try (WritableByteChannel ch = Files.newByteChannel(longNameFile, StandardOpenOption.WRITE, StandardOpenOption.CREATE)) {
ch.write(UTF_8.encode(longName));
}
}
}
}
| simplify code
| src/main/java/org/cryptomator/cryptofs/LongFileNameProvider.java | simplify code | <ide><path>rc/main/java/org/cryptomator/cryptofs/LongFileNameProvider.java
<ide> private void persistInternal() throws IOException {
<ide> Path longNameFile = c9sPath.resolve(INFLATED_FILE_NAME);
<ide> Files.createDirectories(c9sPath);
<del> try (WritableByteChannel ch = Files.newByteChannel(longNameFile, StandardOpenOption.WRITE, StandardOpenOption.CREATE)) {
<del> ch.write(UTF_8.encode(longName));
<del> }
<add> // write the raw UTF-8 bytes; UTF_8.encode(...).array() can carry padding bytes past the buffer's limit
<add> Files.write(longNameFile, longName.getBytes(UTF_8)); //WRITE, CREATE, TRUNCATE_EXISTING
<ide> }
<ide> }
<ide> |
|
JavaScript | apache-2.0 | 17146f5dcbdbc738d5b464f4a08528af34734a0b | 0 | boxed/grid,uberVU/grid,hootsuite/grid,zaeem/grid,uberVU/grid,hootsuite/grid,thenaughtychild/grid,thenaughtychild/grid,gasparfm/grid,gasparfm/grid,zaeem/grid | ;(function($, window, document, undefined) {
var DraggableGridList = function(element, options) {
this.options = $.extend({}, this.defaults, options);
this.$element = $(element);
this._init();
this._bindEvents();
};
DraggableGridList.prototype.defaults = {
rows: 5,
widthHeightRatio: 1
};
DraggableGridList.prototype.destroy = function() {
this._unbindEvents();
};
DraggableGridList.prototype.resize = function(rows) {
if (rows) {
this.options.rows = rows;
}
this._createGridSnapshot();
this.gridList.resizeGrid(this.options.rows);
this._updateGridSnapshot();
this._calculateCellSize();
this.render();
};
DraggableGridList.prototype.resizeItem = function(element, size) {
this._createGridSnapshot();
this.gridList.resizeItem(this._getItemByElement(element), size);
this._updateGridSnapshot();
this.render();
};
DraggableGridList.prototype.render = function() {
this._applySizeToItems();
this._applyPositionToItems();
};
DraggableGridList.prototype._bindMethod = function(fn) {
/**
* Bind prototype method to instance scope (similar to CoffeeScript's fat
* arrow)
*/
var that = this;
return function() {
return fn.apply(that, arguments);
};
};
DraggableGridList.prototype._init = function() {
// Read items and their meta data. Ignore other list elements (like the
// position highlight)
this.$items = this.$element.children('li[data-w]');
this.items = this._generateItemsFromDOM();
// Used to highlight a position an element will land on upon drop
this.$positionHighlight = this.$element.find('.position-highlight').hide();
this._initGridList();
this._calculateCellSize();
this.render();
// Init Draggable JQuery UI plugin for each of the list items
// http://api.jqueryui.com/draggable/
this.$items.draggable({
zIndex: this.items.length,
scroll: false
});
};
DraggableGridList.prototype._initGridList = function() {
// Create instance of GridList (decoupled lib for handling the grid
// positioning and sorting post-drag and dropping)
this.gridList = new GridList(this.items, {rows: this.options.rows});
};
DraggableGridList.prototype._bindEvents = function() {
this._onStart = this._bindMethod(this._onStart);
this._onDrag = this._bindMethod(this._onDrag);
this._onStop = this._bindMethod(this._onStop);
this.$items.on('dragstart', this._onStart);
this.$items.on('drag', this._onDrag);
this.$items.on('dragstop', this._onStop);
};
DraggableGridList.prototype._unbindEvents = function() {
this.$items.off('dragstart', this._onStart);
this.$items.off('drag', this._onDrag);
this.$items.off('dragstop', this._onStop);
};
DraggableGridList.prototype._onStart = function(event, ui) {
// Create a deep copy of the items; we use them to revert the item
// positions after each drag change, making an entire drag operation less
// destructive
this._createGridSnapshot();
// Since dragging actually alters the grid, we need to establish the number
// of cols (+1 extra) before the drag starts
this._maxGridCols = this.gridList.grid.length;
};
DraggableGridList.prototype._onDrag = function(event, ui) {
var item = this._getItemByElement(ui.helper),
newPosition = this._snapItemPositionToGrid(item);
if (this._dragPositionChanged(newPosition)) {
this._previousDragPosition = newPosition;
// Regenerate the grid with the positions from when the drag started
GridList.cloneItems(this._items, this.items);
this.gridList.generateGrid();
// Since the items list is a deep copy, we need to fetch the item
// corresponding to this drag action again
item = this._getItemByElement(ui.helper);
this.gridList.moveItemToPosition(item, newPosition);
// Visually update item positions and highlight shape
this._applyPositionToItems();
this._highlightPositionForItem(item);
}
};
DraggableGridList.prototype._onStop = function(event, ui) {
this._updateGridSnapshot();
this._previousDragPosition = null;
// HACK: jQuery.draggable removes this class after the dragstop callback,
// and we need it removed before the drop, to re-enable CSS transitions
$(ui.helper).removeClass('ui-draggable-dragging');
this._applyPositionToItems();
this._removePositionHighlight();
};
DraggableGridList.prototype._generateItemsFromDOM = function() {
/**
* Generate the structure of items used by the GridList lib, using the DOM
* data of the children of the targeted element. The items will have an
* additional reference to the initial DOM element attached, in order to
* trace back to it and re-render it once its properties are changed by the
* GridList lib
*/
var _this = this,
items = [],
item;
this.$items.each(function(i, element) {
item = {
$element: $(element)
};
$.extend(item, $(element).data());
items.push(item);
});
return items;
};
DraggableGridList.prototype._getItemByElement = function(element) {
// XXX: this could be optimized by storing the item reference inside the
// meta data of the DOM element
for (var i = 0; i < this.items.length; i++) {
if (this.items[i].$element.is(element)) {
return this.items[i];
}
}
};
DraggableGridList.prototype._calculateCellSize = function() {
this._cellHeight = Math.floor(this.$element.height() / this.options.rows);
this._cellWidth = this._cellHeight * this.options.widthHeightRatio;
if (this.options.heightToFontSizeRatio) {
this._fontSize = this._cellHeight * this.options.heightToFontSizeRatio;
}
};
DraggableGridList.prototype._getItemWidth = function(item) {
return item.w * this._cellWidth;
};
DraggableGridList.prototype._getItemHeight = function(item) {
return item.h * this._cellHeight;
};
DraggableGridList.prototype._applySizeToItems = function() {
for (var i = 0; i < this.items.length; i++) {
this.items[i].$element.css({
width: this._getItemWidth(this.items[i]),
height: this._getItemHeight(this.items[i])
});
}
if (this.options.heightToFontSizeRatio) {
this.$items.css('font-size', this._fontSize);
}
};
DraggableGridList.prototype._applyPositionToItems = function() {
// TODO: Implement group separators
for (var i = 0; i < this.items.length; i++) {
// Don't interfere with the positions of the dragged items
if (this.items[i].move) {
continue;
}
this.items[i].$element.css({
left: this.items[i].x * this._cellWidth,
top: this.items[i].y * this._cellHeight
});
}
// Update the width of the entire grid container with an extra column on
// the right for extra dragging room
this.$element.width((this.gridList.grid.length + 1) * this._cellWidth);
};
DraggableGridList.prototype._dragPositionChanged = function(newPosition) {
if (!this._previousDragPosition) {
return true;
}
return (newPosition[0] != this._previousDragPosition[0] ||
newPosition[1] != this._previousDragPosition[1]);
};
DraggableGridList.prototype._snapItemPositionToGrid = function(item) {
var position = item.$element.position(),
row,
col;
position.left -= this.$element.position().left; // account for the grid container's own offset
col = Math.round(position.left / this._cellWidth);
row = Math.round(position.top / this._cellHeight);
// Keep item position within the grid and don't let the item create more
// than one extra column
col = Math.max(col, 0);
row = Math.max(row, 0);
col = Math.min(col, this._maxGridCols);
row = Math.min(row, this.options.rows - item.h);
return [col, row];
};
DraggableGridList.prototype._highlightPositionForItem = function(item) {
this.$positionHighlight.css({
width: this._getItemWidth(item),
height: this._getItemHeight(item),
left: item.x * this._cellWidth,
top: item.y * this._cellHeight
}).show();
if (this.options.heightToFontSizeRatio) {
this.$positionHighlight.css('font-size', this._fontSize);
}
};
DraggableGridList.prototype._removePositionHighlight = function() {
this.$positionHighlight.hide();
};
DraggableGridList.prototype._createGridSnapshot = function() {
this._items = GridList.cloneItems(this.items);
};
DraggableGridList.prototype._updateGridSnapshot = function() {
// Notify the user with the items that changed since the previous snapshot
this._triggerOnChange();
GridList.cloneItems(this.items, this._items);
};
DraggableGridList.prototype._triggerOnChange = function() {
if (typeof(this.options.onChange) != 'function') {
return;
}
this.options.onChange.call(
this, this.gridList.getChangedItems(this._items, '$element'));
};
$.fn.gridList = function(options) {
if (!window.GridList) {
throw new Error('GridList lib required');
}
var instance,
method,
args;
if (typeof(options) == 'string') {
method = options;
args = Array.prototype.slice.call(arguments, 1);
}
this.each(function() {
instance = $(this).data('_gridList');
// The plugin can be called with no method on an existing GridList
// instance to re-initialize it
if (instance && !method) {
instance.destroy();
instance = null;
}
if (!instance) {
instance = new DraggableGridList(this, options);
$(this).data('_gridList', instance);
}
if (method) {
instance[method].apply(instance, args);
}
});
// Maintain jQuery chain
return this;
};
})(jQuery, window, document);
| lib/jquery.gridList.js | ;(function($, window, document, undefined) {
var DraggableGridList = function(element, options) {
this.options = $.extend({}, this.defaults, options);
this.$element = $(element);
this._init();
this._bindEvents();
};
DraggableGridList.prototype.defaults = {
rows: 5,
widthHeightRatio: 1
};
DraggableGridList.prototype.destroy = function() {
this._unbindEvents();
};
DraggableGridList.prototype.resize = function(rows) {
if (rows) {
this.options.rows = rows;
}
this._createGridSnapshot();
this.gridList.resizeGrid(this.options.rows);
this._updateGridSnapshot();
this._calculateCellSize();
this.render();
};
DraggableGridList.prototype.resizeItem = function(element, size) {
this._createGridSnapshot();
this.gridList.resizeItem(this._getItemByElement(element), size);
this._updateGridSnapshot();
this.render();
};
DraggableGridList.prototype.render = function() {
this._applySizeToItems();
this._applyPositionToItems();
};
DraggableGridList.prototype._bindMethod = function(fn) {
/**
* Bind prototype method to instance scope (similar to CoffeeScript's fat
* arrow)
*/
var that = this;
return function() {
return fn.apply(that, arguments);
};
};
DraggableGridList.prototype._init = function() {
// Read items and their meta data. Ignore other list elements (like the
// position highlight)
this.$items = this.$element.children('li[data-w]');
this.items = this._generateItemsFromDOM();
// Used to highlight a position an element will land on upon drop
this.$positionHighlight = this.$element.find('.position-highlight').hide();
this._initGridList();
this._calculateCellSize();
this.render();
// Init Draggable JQuery UI plugin for each of the list items
// http://api.jqueryui.com/draggable/
this.$items.draggable({
zIndex: this.items.length,
scroll: false
});
};
DraggableGridList.prototype._initGridList = function() {
// Create instance of GridList (decoupled lib for handling the grid
// positioning and sorting post-drag and dropping)
this.gridList = new GridList(this.items, {rows: this.options.rows});
};
DraggableGridList.prototype._bindEvents = function() {
this._onStart = this._bindMethod(this._onStart);
this._onDrag = this._bindMethod(this._onDrag);
this._onStop = this._bindMethod(this._onStop);
this.$items.on('dragstart', this._onStart);
this.$items.on('drag', this._onDrag);
this.$items.on('dragstop', this._onStop);
};
DraggableGridList.prototype._unbindEvents = function() {
this.$items.off('dragstart', this._onStart);
this.$items.off('drag', this._onDrag);
this.$items.off('dragstop', this._onStop);
};
DraggableGridList.prototype._onStart = function(event, ui) {
// Create a deep copy of the items; we use them to revert the item
// positions after each drag change, making an entire drag operation less
// destructive
this._createGridSnapshot();
// Since dragging actually alters the grid, we need to establish the number
// of cols (+1 extra) before the drag starts
this._maxGridCols = this.gridList.grid.length;
};
DraggableGridList.prototype._onDrag = function(event, ui) {
var item = this._getItemByElement(ui.helper),
newPosition = this._snapItemPositionToGrid(item);
if (this._dragPositionChanged(newPosition)) {
this._previousDragPosition = newPosition;
// Regenerate the grid with the positions from when the drag started
GridList.cloneItems(this._items, this.items);
this.gridList.generateGrid();
// Since the items list is a deep copy, we need to fetch the item
// corresponding to this drag action again
item = this._getItemByElement(ui.helper);
this.gridList.moveItemToPosition(item, newPosition);
// Visually update item positions and highlight shape
this._applyPositionToItems();
this._highlightPositionForItem(item);
}
};
DraggableGridList.prototype._onStop = function(event, ui) {
this._updateGridSnapshot();
this._previousDragPosition = null;
// HACK: jQuery.draggable removes this class after the dragstop callback,
// and we need it removed before the drop, to re-enable CSS transitions
$(ui.helper).removeClass('ui-draggable-dragging');
this._applyPositionToItems();
this._removePositionHighlight();
};
DraggableGridList.prototype._generateItemsFromDOM = function() {
/**
* Generate the structure of items used by the GridList lib, using the DOM
* data of the children of the targeted element. The items will have an
* additional reference to the initial DOM element attached, in order to
* trace back to it and re-render it once its properties are changed by the
* GridList lib
*/
var _this = this,
items = [],
item;
this.$items.each(function(i, element) {
item = {
$element: $(element)
};
$.extend(item, $(element).data());
items.push(item);
});
return items;
};
DraggableGridList.prototype._getItemByElement = function(element) {
// XXX: this could be optimized by storing the item reference inside the
// meta data of the DOM element
for (var i = 0; i < this.items.length; i++) {
if (this.items[i].$element.is(element)) {
return this.items[i];
}
}
};
DraggableGridList.prototype._calculateCellSize = function() {
this._cellHeight = Math.floor(this.$element.height() / this.options.rows);
this._cellWidth = this._cellHeight * this.options.widthHeightRatio;
if (this.options.heightToFontSizeRatio) {
this._fontSize = this._cellHeight * this.options.heightToFontSizeRatio;
}
};
DraggableGridList.prototype._getItemWidth = function(item) {
return item.w * this._cellWidth;
};
DraggableGridList.prototype._getItemHeight = function(item) {
return item.h * this._cellHeight;
};
DraggableGridList.prototype._applySizeToItems = function() {
for (var i = 0; i < this.items.length; i++) {
this.items[i].$element.css({
width: this._getItemWidth(this.items[i]),
height: this._getItemHeight(this.items[i])
});
}
if (this.options.heightToFontSizeRatio) {
this.$items.css('font-size', this._fontSize);
}
};
DraggableGridList.prototype._applyPositionToItems = function() {
// TODO: Implement group separators
for (var i = 0; i < this.items.length; i++) {
// Don't interfere with the positions of the dragged items
if (this.items[i].move) {
continue;
}
this.items[i].$element.css({
left: this.items[i].x * this._cellWidth,
top: this.items[i].y * this._cellHeight
});
}
// Update the width of the entire grid container with an extra column on
// the right for extra dragging room
this.$element.width((this.gridList.grid.length + 1) * this._cellWidth);
};
DraggableGridList.prototype._dragPositionChanged = function(newPosition) {
if (!this._previousDragPosition) {
return true;
}
return (newPosition[0] != this._previousDragPosition[0] ||
newPosition[1] != this._previousDragPosition[1]);
};
DraggableGridList.prototype._snapItemPositionToGrid = function(item) {
var position = item.$element.position(),
row,
col;
position.left -= this.$element.position().left; // account for the grid container's own offset
col = Math.round(position.left / this._cellWidth);
row = Math.round(position.top / this._cellHeight);
// Keep item position within the grid and don't let the item create more
// than one extra column
col = Math.max(col, 0);
row = Math.max(row, 0);
col = Math.min(col, this._maxGridCols);
row = Math.min(row, this.options.rows - item.h);
return [col, row];
};
DraggableGridList.prototype._highlightPositionForItem = function(item) {
this.$positionHighlight.css({
width: this._getItemWidth(item),
height: this._getItemHeight(item),
left: item.x * this._cellWidth,
top: item.y * this._cellHeight
}).show();
if (this.options.heightToFontSizeRatio) {
this.$positionHighlight.css('font-size', this._fontSize);
}
};
DraggableGridList.prototype._removePositionHighlight = function() {
this.$positionHighlight.hide();
};
DraggableGridList.prototype._createGridSnapshot = function() {
this._items = GridList.cloneItems(this.items);
};
DraggableGridList.prototype._updateGridSnapshot = function() {
// Notify the user with the items that changed since the previous snapshot
this._callUserCallback();
GridList.cloneItems(this.items, this._items);
};
DraggableGridList.prototype._callUserCallback = function() {
if (typeof(this.options.onChange) != 'function') {
return;
}
this.options.onChange.call(
this, this.gridList.getChangedItems(this._items, '$element'));
};
$.fn.gridList = function(options) {
if (!window.GridList) {
throw new Error('GridList lib required');
}
var instance,
method,
args;
if (typeof(options) == 'string') {
method = options;
args = Array.prototype.slice.call(arguments, 1);
}
this.each(function() {
instance = $(this).data('_gridList');
// The plugin can be called with no method on an existing GridList
// instance to re-initialize it
if (instance && !method) {
instance.destroy();
instance = null;
}
if (!instance) {
instance = new DraggableGridList(this, options);
$(this).data('_gridList', instance);
}
if (method) {
instance[method].apply(instance, args);
}
});
// Maintain jQuery chain
return this;
};
})(jQuery, window, document);
| Rename _callUserCallback as _triggerOnChange #8
| lib/jquery.gridList.js | Rename _callUserCallback as _triggerOnChange #8 | <ide><path>ib/jquery.gridList.js
<ide>
<ide> DraggableGridList.prototype._updateGridSnapshot = function() {
<ide> // Notify the user with the items that changed since the previous snapshot
<del> this._callUserCallback();
<add> this._triggerOnChange();
<ide> GridList.cloneItems(this.items, this._items);
<ide> };
<ide>
<del> DraggableGridList.prototype._callUserCallback = function() {
<add> DraggableGridList.prototype._triggerOnChange = function() {
<ide> if (typeof(this.options.onChange) != 'function') {
<ide> return;
<ide> } |
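
A minimal usage sketch for the plugin and the hook renamed above; the element id, option values and handler body are illustrative assumptions:

// Assumes jQuery, jQuery UI draggable and the standalone GridList lib are loaded.
$('#grid').gridList({
    rows: 4,
    widthHeightRatio: 1,
    onChange: function (changedItems) {
        // invoked through _triggerOnChange() after each grid snapshot update
        console.log(changedItems.length + ' item(s) changed position or size');
    }
});

// Named methods go through the same plugin call, e.g. re-flow the grid onto 6 rows:
$('#grid').gridList('resize', 6);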
|
Java | mit | 2078e2834cb0651b3c620a8c338a5bf8990a00f9 | 0 | jaquadro/StorageDrawers,codewarrior0/StorageDrawers,jaquadro/StorageDrawers,bloodmc/StorageDrawers | package com.jaquadro.minecraft.storagedrawers.block.tile;
import com.jaquadro.minecraft.storagedrawers.api.inventory.IDrawerInventory;
import com.jaquadro.minecraft.storagedrawers.api.storage.IDrawer;
import com.jaquadro.minecraft.storagedrawers.api.storage.IDrawerGroup;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.ISidedInventory;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.network.NetworkManager;
import net.minecraft.network.Packet;
import net.minecraft.network.play.server.S35PacketUpdateTileEntity;
import net.minecraft.tileentity.TileEntity;
import net.minecraftforge.common.util.Constants;
public class TileEntitySlave extends TileEntity implements IDrawerGroup, ISidedInventory
{
private BlockCoord controllerCoord;
private BlockCoord selfCoord;
private int[] inventorySlots = new int[] { 0 };
public void ensureInitialized () {
if (selfCoord == null) {
selfCoord = new BlockCoord(xCoord, yCoord, zCoord);
markDirty();
}
}
@Override
public void readFromNBT (NBTTagCompound tag) {
super.readFromNBT(tag);
selfCoord = new BlockCoord(xCoord, yCoord, zCoord);
if (tag.hasKey("Controller", Constants.NBT.TAG_COMPOUND)) {
NBTTagCompound ctag = tag.getCompoundTag("Controller");
controllerCoord = new BlockCoord(ctag.getInteger("x"), ctag.getInteger("y"), ctag.getInteger("z"));
}
}
@Override
public void writeToNBT (NBTTagCompound tag) {
super.writeToNBT(tag);
if (controllerCoord != null) {
NBTTagCompound ctag = new NBTTagCompound();
ctag.setInteger("x", controllerCoord.x());
ctag.setInteger("y", controllerCoord.y());
ctag.setInteger("z", controllerCoord.z());
tag.setTag("Controller", ctag);
}
}
@Override
public Packet getDescriptionPacket () {
NBTTagCompound tag = new NBTTagCompound();
writeToNBT(tag);
return new S35PacketUpdateTileEntity(xCoord, yCoord, zCoord, 5, tag);
}
@Override
public void onDataPacket (NetworkManager net, S35PacketUpdateTileEntity pkt) {
readFromNBT(pkt.func_148857_g());
getWorldObj().func_147479_m(xCoord, yCoord, zCoord); // markBlockForRenderUpdate
}
public void bindController (int x, int y, int z) {
if (controllerCoord != null && controllerCoord.x() == x && controllerCoord.y() == y && controllerCoord.z() == z)
return;
controllerCoord = new BlockCoord(x, y, z);
markDirty();
}
public TileEntityController getController () {
if (controllerCoord == null)
return null;
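// selfCoord can still be null right after the tile loads, so set it before any isValidSlave(selfCoord) checks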
ensureInitialized();
TileEntity te = worldObj.getTileEntity(controllerCoord.x(), controllerCoord.y(), controllerCoord.z());
if (!(te instanceof TileEntityController)) {
controllerCoord = null;
markDirty();
return null;
}
return (TileEntityController)te;
}
@Override
public int[] getAccessibleSlotsFromSide (int side) {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return inventorySlots;
return controller.getAccessibleSlotsFromSide(0);
}
@Override
public boolean canInsertItem (int slot, ItemStack stack, int side) {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return false;
return controller.canInsertItem(slot, stack, 0);
}
@Override
public boolean canExtractItem (int slot, ItemStack stack, int side) {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return false;
return controller.canExtractItem(slot, stack, side);
}
@Override
public int getSizeInventory () {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return 1;
return controller.getSizeInventory();
}
@Override
public ItemStack getStackInSlot (int slot) {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return null;
return controller.getStackInSlot(slot);
}
@Override
public ItemStack decrStackSize (int slot, int count) {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return null;
return controller.decrStackSize(slot, count);
}
@Override
public ItemStack getStackInSlotOnClosing (int slot) {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return null;
return controller.getStackInSlotOnClosing(slot);
}
@Override
public void setInventorySlotContents (int slot, ItemStack stack) {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return;
controller.setInventorySlotContents(slot, stack);
}
@Override
public String getInventoryName () {
// TODO
return null;
}
@Override
public boolean hasCustomInventoryName () {
// TODO
return false;
}
@Override
public int getInventoryStackLimit () {
return 64;
}
@Override
public boolean isUseableByPlayer (EntityPlayer player) {
return false;
}
@Override
public void openInventory () {
}
@Override
public void closeInventory () {
}
@Override
public boolean isItemValidForSlot (int slot, ItemStack stack) {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return false;
return controller.isItemValidForSlot(slot, stack);
}
@Override
public int getDrawerCount () {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return 0;
return controller.getDrawerCount();
}
@Override
public IDrawer getDrawer (int slot) {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return null;
return controller.getDrawer(slot);
}
@Override
public boolean isDrawerEnabled (int slot) {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return false;
return controller.isDrawerEnabled(slot);
}
@Override
public IDrawerInventory getDrawerInventory () {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return null;
return controller.getDrawerInventory();
}
@Override
public void markDirty () {
TileEntityController controller = getController();
if (controller != null && controller.isValidSlave(selfCoord))
controller.markDirty();
super.markDirty();
}
@Override
public boolean markDirtyIfNeeded () {
TileEntityController controller = getController();
if (controller != null && controller.isValidSlave(selfCoord))
return controller.markDirtyIfNeeded();
return false;
}
}
| src/com/jaquadro/minecraft/storagedrawers/block/tile/TileEntitySlave.java | package com.jaquadro.minecraft.storagedrawers.block.tile;
import com.jaquadro.minecraft.storagedrawers.api.inventory.IDrawerInventory;
import com.jaquadro.minecraft.storagedrawers.api.storage.IDrawer;
import com.jaquadro.minecraft.storagedrawers.api.storage.IDrawerGroup;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.ISidedInventory;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.network.NetworkManager;
import net.minecraft.network.Packet;
import net.minecraft.network.play.server.S35PacketUpdateTileEntity;
import net.minecraft.tileentity.TileEntity;
import net.minecraftforge.common.util.Constants;
public class TileEntitySlave extends TileEntity implements IDrawerGroup, ISidedInventory
{
private BlockCoord controllerCoord;
private BlockCoord selfCoord;
private int[] inventorySlots = new int[] { 0 };
public void ensureInitialized () {
if (selfCoord == null) {
selfCoord = new BlockCoord(xCoord, yCoord, zCoord);
markDirty();
}
}
@Override
public void readFromNBT (NBTTagCompound tag) {
super.readFromNBT(tag);
selfCoord = new BlockCoord(xCoord, yCoord, zCoord);
if (tag.hasKey("Controller", Constants.NBT.TAG_COMPOUND)) {
NBTTagCompound ctag = tag.getCompoundTag("Controller");
controllerCoord = new BlockCoord(ctag.getInteger("x"), ctag.getInteger("y"), ctag.getInteger("z"));
}
}
@Override
public void writeToNBT (NBTTagCompound tag) {
super.writeToNBT(tag);
if (controllerCoord != null) {
NBTTagCompound ctag = new NBTTagCompound();
ctag.setInteger("x", controllerCoord.x());
ctag.setInteger("y", controllerCoord.y());
ctag.setInteger("z", controllerCoord.z());
tag.setTag("Controller", ctag);
}
}
@Override
public Packet getDescriptionPacket () {
NBTTagCompound tag = new NBTTagCompound();
writeToNBT(tag);
return new S35PacketUpdateTileEntity(xCoord, yCoord, zCoord, 5, tag);
}
@Override
public void onDataPacket (NetworkManager net, S35PacketUpdateTileEntity pkt) {
readFromNBT(pkt.func_148857_g());
getWorldObj().func_147479_m(xCoord, yCoord, zCoord); // markBlockForRenderUpdate
}
public void bindController (int x, int y, int z) {
if (controllerCoord != null && controllerCoord.x() == x && controllerCoord.y() == y && controllerCoord.z() == z)
return;
controllerCoord = new BlockCoord(x, y, z);
markDirty();
}
public TileEntityController getController () {
if (controllerCoord == null)
return null;
TileEntity te = worldObj.getTileEntity(controllerCoord.x(), controllerCoord.y(), controllerCoord.z());
if (!(te instanceof TileEntityController)) {
controllerCoord = null;
markDirty();
return null;
}
return (TileEntityController)te;
}
@Override
public int[] getAccessibleSlotsFromSide (int side) {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return inventorySlots;
return controller.getAccessibleSlotsFromSide(0);
}
@Override
public boolean canInsertItem (int slot, ItemStack stack, int side) {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return false;
return controller.canInsertItem(slot, stack, 0);
}
@Override
public boolean canExtractItem (int slot, ItemStack stack, int side) {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return false;
return controller.canExtractItem(slot, stack, side);
}
@Override
public int getSizeInventory () {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return 1;
return controller.getSizeInventory();
}
@Override
public ItemStack getStackInSlot (int slot) {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return null;
return controller.getStackInSlot(slot);
}
@Override
public ItemStack decrStackSize (int slot, int count) {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return null;
return controller.decrStackSize(slot, count);
}
@Override
public ItemStack getStackInSlotOnClosing (int slot) {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return null;
return controller.getStackInSlotOnClosing(slot);
}
@Override
public void setInventorySlotContents (int slot, ItemStack stack) {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return;
controller.setInventorySlotContents(slot, stack);
}
@Override
public String getInventoryName () {
// TODO
return null;
}
@Override
public boolean hasCustomInventoryName () {
// TODO
return false;
}
@Override
public int getInventoryStackLimit () {
return 64;
}
@Override
public boolean isUseableByPlayer (EntityPlayer player) {
return false;
}
@Override
public void openInventory () {
}
@Override
public void closeInventory () {
}
@Override
public boolean isItemValidForSlot (int slot, ItemStack stack) {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return false;
return controller.isItemValidForSlot(slot, stack);
}
@Override
public int getDrawerCount () {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return 0;
return controller.getDrawerCount();
}
@Override
public IDrawer getDrawer (int slot) {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return null;
return controller.getDrawer(slot);
}
@Override
public boolean isDrawerEnabled (int slot) {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return false;
return controller.isDrawerEnabled(slot);
}
@Override
public IDrawerInventory getDrawerInventory () {
TileEntityController controller = getController();
if (controller == null || !controller.isValidSlave(selfCoord))
return null;
return controller.getDrawerInventory();
}
@Override
public void markDirty () {
TileEntityController controller = getController();
if (controller != null && controller.isValidSlave(selfCoord))
controller.markDirty();
super.markDirty();
}
@Override
public boolean markDirtyIfNeeded () {
TileEntityController controller = getController();
if (controller != null && controller.isValidSlave(selfCoord))
return controller.markDirtyIfNeeded();
return false;
}
}
| Fix slave binding problem
| src/com/jaquadro/minecraft/storagedrawers/block/tile/TileEntitySlave.java | Fix slave binding problem | <ide><path>rc/com/jaquadro/minecraft/storagedrawers/block/tile/TileEntitySlave.java
<ide> if (controllerCoord == null)
<ide> return null;
<ide>
<add> ensureInitialized();
<add>
<ide> TileEntity te = worldObj.getTileEntity(controllerCoord.x(), controllerCoord.y(), controllerCoord.z());
<ide> if (!(te instanceof TileEntityController)) {
<ide> controllerCoord = null; |
|
JavaScript | mit | 01de2eb266f4f877e492905f4eddfd0637c936b5 | 0 | umbraco/Umbraco-CMS,KevinJump/Umbraco-CMS,abryukhov/Umbraco-CMS,JimBobSquarePants/Umbraco-CMS,abjerner/Umbraco-CMS,tcmorris/Umbraco-CMS,JimBobSquarePants/Umbraco-CMS,tcmorris/Umbraco-CMS,bjarnef/Umbraco-CMS,JimBobSquarePants/Umbraco-CMS,madsoulswe/Umbraco-CMS,NikRimington/Umbraco-CMS,abryukhov/Umbraco-CMS,robertjf/Umbraco-CMS,bjarnef/Umbraco-CMS,leekelleher/Umbraco-CMS,dawoe/Umbraco-CMS,tcmorris/Umbraco-CMS,bjarnef/Umbraco-CMS,marcemarc/Umbraco-CMS,marcemarc/Umbraco-CMS,robertjf/Umbraco-CMS,KevinJump/Umbraco-CMS,abjerner/Umbraco-CMS,arknu/Umbraco-CMS,robertjf/Umbraco-CMS,robertjf/Umbraco-CMS,dawoe/Umbraco-CMS,hfloyd/Umbraco-CMS,mattbrailsford/Umbraco-CMS,abjerner/Umbraco-CMS,tcmorris/Umbraco-CMS,marcemarc/Umbraco-CMS,bjarnef/Umbraco-CMS,leekelleher/Umbraco-CMS,KevinJump/Umbraco-CMS,rasmuseeg/Umbraco-CMS,hfloyd/Umbraco-CMS,leekelleher/Umbraco-CMS,madsoulswe/Umbraco-CMS,leekelleher/Umbraco-CMS,umbraco/Umbraco-CMS,dawoe/Umbraco-CMS,NikRimington/Umbraco-CMS,tcmorris/Umbraco-CMS,abryukhov/Umbraco-CMS,umbraco/Umbraco-CMS,leekelleher/Umbraco-CMS,JimBobSquarePants/Umbraco-CMS,madsoulswe/Umbraco-CMS,arknu/Umbraco-CMS,dawoe/Umbraco-CMS,marcemarc/Umbraco-CMS,NikRimington/Umbraco-CMS,hfloyd/Umbraco-CMS,mattbrailsford/Umbraco-CMS,hfloyd/Umbraco-CMS,rasmuseeg/Umbraco-CMS,mattbrailsford/Umbraco-CMS,arknu/Umbraco-CMS,tcmorris/Umbraco-CMS,rasmuseeg/Umbraco-CMS,hfloyd/Umbraco-CMS,robertjf/Umbraco-CMS,abjerner/Umbraco-CMS,KevinJump/Umbraco-CMS,KevinJump/Umbraco-CMS,marcemarc/Umbraco-CMS,abryukhov/Umbraco-CMS,dawoe/Umbraco-CMS,mattbrailsford/Umbraco-CMS,arknu/Umbraco-CMS,JimBobSquarePants/Umbraco-CMS,umbraco/Umbraco-CMS | (function () {
"use strict";
function LogViewerOverviewController($q, $location, $timeout, logViewerResource, navigationService) {
var vm = this;
vm.loading = false;
vm.canLoadLogs = false;
vm.searches = [];
vm.numberOfErrors = 0;
vm.commonLogMessages = [];
vm.commonLogMessagesCount = 10;
// ChartJS Options - for count/overview of log distribution
vm.logTypeLabels = ["Info", "Debug", "Warning", "Error", "Critical"];
vm.logTypeData = [0, 0, 0, 0, 0];
vm.logTypeColors = [ '#dcdcdc', '#97bbcd', '#46bfbd', '#fdb45c', '#f7464a'];
vm.chartOptions = {
legend: {
display: true,
position: 'left'
}
};
//functions
vm.searchLogQuery = searchLogQuery;
vm.findMessageTemplate = findMessageTemplate;
function preFlightCheck(){
vm.loading = true;
//Do our pre-flight check (to see if we can view logs)
//i.e. that the log file is NOT too big (such as 1GB), which could crash the site
logViewerResource.canViewLogs().then(function(result){
vm.loading = false;
vm.canLoadLogs = result;
if(result){
//Can view logs - so initialise
init();
}
});
}
function init() {
vm.loading = true;
var savedSearches = logViewerResource.getSavedSearches().then(function (data) {
vm.searches = data;
},
// fallback to some defaults if error from API response
function () {
vm.searches = [
{
"name": "Find all logs where the Level is NOT Verbose and NOT Debug",
"query": "Not(@Level='Verbose') and Not(@Level='Debug')"
},
{
"name": "Find all logs that has an exception property (Warning, Error & Critical with Exceptions)",
"query": "Has(@Exception)"
},
{
"name": "Find all logs that have the property 'Duration'",
"query": "Has(Duration)"
},
{
"name": "Find all logs that have the property 'Duration' and the duration is greater than 1000ms",
"query": "Has(Duration) and Duration > 1000"
},
{
"name": "Find all logs that are from the namespace 'Umbraco.Core'",
"query": "StartsWith(SourceContext, 'Umbraco.Core')"
},
{
"name": "Find all logs that use a specific log message template",
"query": "@MessageTemplate = '[Timing {TimingId}] {EndMessage} ({TimingDuration}ms)'"
}
]
});
var numOfErrors = logViewerResource.getNumberOfErrors().then(function (data) {
vm.numberOfErrors = data;
});
var logCounts = logViewerResource.getLogLevelCounts().then(function (data) {
vm.logTypeData = [];
vm.logTypeData.push(data.Information);
vm.logTypeData.push(data.Debug);
vm.logTypeData.push(data.Warning);
vm.logTypeData.push(data.Error);
vm.logTypeData.push(data.Fatal);
});
var commonMsgs = logViewerResource.getMessageTemplates().then(function(data){
vm.commonLogMessages = data;
});
//Set loading indicator to false when these 4 queries complete
$q.all([savedSearches, numOfErrors, logCounts, commonMsgs]).then(function(data) {
vm.loading = false;
});
$timeout(function () {
navigationService.syncTree({ tree: "logViewer", path: "-1" });
});
}
function searchLogQuery(logQuery){
$location.path("/settings/logViewer/search").search({lq: logQuery});
}
function findMessageTemplate(template){
var logQuery = "@MessageTemplate='" + template.MessageTemplate + "'";
searchLogQuery(logQuery);
}
preFlightCheck();
}
angular.module("umbraco").controller("Umbraco.Editors.LogViewer.OverviewController", LogViewerOverviewController);
})();
| src/Umbraco.Web.UI.Client/src/views/logviewer/overview.controller.js | (function () {
"use strict";
function LogViewerOverviewController($q, $location, logViewerResource) {
var vm = this;
vm.loading = false;
vm.canLoadLogs = false;
vm.searches = [];
vm.numberOfErrors = 0;
vm.commonLogMessages = [];
vm.commonLogMessagesCount = 10;
// ChartJS Options - for count/overview of log distribution
vm.logTypeLabels = ["Info", "Debug", "Warning", "Error", "Critical"];
vm.logTypeData = [0, 0, 0, 0, 0];
vm.logTypeColors = [ '#dcdcdc', '#97bbcd', '#46bfbd', '#fdb45c', '#f7464a'];
vm.chartOptions = {
legend: {
display: true,
position: 'left'
}
};
//functions
vm.searchLogQuery = searchLogQuery;
vm.findMessageTemplate = findMessageTemplate;
function preFlightCheck(){
vm.loading = true;
//Do our pre-flight check (to see if we can view logs)
//i.e. that the log file is NOT too big (such as 1GB), which could crash the site
logViewerResource.canViewLogs().then(function(result){
vm.loading = false;
vm.canLoadLogs = result;
if(result){
//Can view logs - so initialise
init();
}
});
}
function init() {
vm.loading = true;
var savedSearches = logViewerResource.getSavedSearches().then(function (data) {
vm.searches = data;
},
// fallback to some defaults if error from API response
function () {
vm.searches = [
{
"name": "Find all logs where the Level is NOT Verbose and NOT Debug",
"query": "Not(@Level='Verbose') and Not(@Level='Debug')"
},
{
"name": "Find all logs that has an exception property (Warning, Error & Critical with Exceptions)",
"query": "Has(@Exception)"
},
{
"name": "Find all logs that have the property 'Duration'",
"query": "Has(Duration)"
},
{
"name": "Find all logs that have the property 'Duration' and the duration is greater than 1000ms",
"query": "Has(Duration) and Duration > 1000"
},
{
"name": "Find all logs that are from the namespace 'Umbraco.Core'",
"query": "StartsWith(SourceContext, 'Umbraco.Core')"
},
{
"name": "Find all logs that use a specific log message template",
"query": "@MessageTemplate = '[Timing {TimingId}] {EndMessage} ({TimingDuration}ms)'"
}
]
});
var numOfErrors = logViewerResource.getNumberOfErrors().then(function (data) {
vm.numberOfErrors = data;
});
var logCounts = logViewerResource.getLogLevelCounts().then(function (data) {
vm.logTypeData = [];
vm.logTypeData.push(data.Information);
vm.logTypeData.push(data.Debug);
vm.logTypeData.push(data.Warning);
vm.logTypeData.push(data.Error);
vm.logTypeData.push(data.Fatal);
});
var commonMsgs = logViewerResource.getMessageTemplates().then(function(data){
vm.commonLogMessages = data;
});
//Set loading indicator to false when these 4 queries complete
$q.all([savedSearches, numOfErrors, logCounts, commonMsgs]).then(function(data) {
vm.loading = false;
});
}
function searchLogQuery(logQuery){
$location.path("/settings/logViewer/search").search({lq: logQuery});
}
function findMessageTemplate(template){
var logQuery = "@MessageTemplate='" + template.MessageTemplate + "'";
searchLogQuery(logQuery);
}
preFlightCheck();
}
angular.module("umbraco").controller("Umbraco.Editors.LogViewer.OverviewController", LogViewerOverviewController);
})();
| Synctree in log viewer overview
| src/Umbraco.Web.UI.Client/src/views/logviewer/overview.controller.js | Synctree in log viewer overview | <ide><path>rc/Umbraco.Web.UI.Client/src/views/logviewer/overview.controller.js
<ide> (function () {
<ide> "use strict";
<ide>
<del> function LogViewerOverviewController($q, $location, logViewerResource) {
<add> function LogViewerOverviewController($q, $location, $timeout, logViewerResource, navigationService) {
<ide>
<ide> var vm = this;
<ide> vm.loading = false;
<ide> vm.loading = false;
<ide> });
<ide>
<add> $timeout(function () {
<add> navigationService.syncTree({ tree: "logViewer", path: "-1" });
<add> });
<ide> }
<ide>
<ide> function searchLogQuery(logQuery){ |
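
A short sketch of the search hand-off above; the template row is a made-up example of what getMessageTemplates() might return (the controller only reads MessageTemplate; the Count field is an assumption):

// Hypothetical common-message row and the query findMessageTemplate() derives from it:
var row = { MessageTemplate: '[Timing {TimingId}] {EndMessage} ({TimingDuration}ms)', Count: 42 };
vm.findMessageTemplate(row);
// equivalent to:
vm.searchLogQuery("@MessageTemplate='[Timing {TimingId}] {EndMessage} ({TimingDuration}ms)'");
// which routes to /settings/logViewer/search with the query carried in the 'lq' param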
|
Java | apache-2.0 | 0e561b8dd30bce367ef4b4e11846b6b25095ecdf | 0 | CloudSlang/cs-intellij-plugin | package com.intellij.lang.cloudslang.completion.macro;
import com.intellij.codeInsight.template.Expression;
import com.intellij.codeInsight.template.ExpressionContext;
import com.intellij.codeInsight.template.Macro;
import com.intellij.codeInsight.template.Result;
import com.intellij.codeInsight.template.TemplateContextType;
import com.intellij.codeInsight.template.TextResult;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import org.jetbrains.annotations.NotNull;
import java.io.File;
import java.nio.file.Path;
import static java.nio.file.Paths.get;
import static java.util.Locale.ENGLISH;
import static org.apache.commons.lang3.StringUtils.isNotEmpty;
public class CurrentNamespaceMacro extends Macro {
private static final String DEFAULT_NAMESPACE_TO_USE = "io.cloudslang.content";
private static final String RELATIVE_PATH_TO_PROJECT = "relativePathToProject";
private static final String NAMESPACE_SEPARATOR = ".";
@Override
public String getName() {
return RELATIVE_PATH_TO_PROJECT;
}
@Override
public String getPresentableName() {
return getName();
}
@Override
public Result calculateResult(@NotNull Expression[] params, ExpressionContext context) {
Project project = context.getProject();
if (context.getEditor() == null) {
return new TextResult(DEFAULT_NAMESPACE_TO_USE);
}
PsiFile file = PsiDocumentManager.getInstance(project).getPsiFile(context.getEditor().getDocument());
if ((file == null) || (file.getVirtualFile() == null)) {
return new TextResult(DEFAULT_NAMESPACE_TO_USE);
}
VirtualFile virtualFile = file.getVirtualFile();
String editorFilePath = virtualFile.getPath();
String projectPath = project.getBasePath();
return new TextResult(fixNamespace(projectPath, editorFilePath));
}
@Override
public Result calculateQuickResult(@NotNull Expression[] params, ExpressionContext context) {
return calculateResult(params, context);
}
@Override
public boolean isAcceptableInContext(TemplateContextType context) {
return true;
}
private String fixNamespace(final String projectPath, final String filePath) {
// Exclude file name from namespace value
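// e.g. <project>/My Flows/sub-dir/flow.sl -> my_flows.sub_dir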
Path relativePath = get(projectPath).relativize(get(new File(filePath).getParent()));
int nameCount = relativePath.getNameCount();
if (nameCount <= 0) {
return DEFAULT_NAMESPACE_TO_USE;
}
StringBuilder strBuilder = new StringBuilder(relativePath.toString().length());
int nameCountMinusOne = nameCount - 1;
for (int index = 0; index < nameCount; index++) {
String cleanPart = fixPathPart(relativePath.getName(index).toString());
if (isNotEmpty(cleanPart)) {
strBuilder.append(cleanPart.toLowerCase(ENGLISH)); // namespace should be lowercase
if (index < nameCountMinusOne) {
strBuilder.append(NAMESPACE_SEPARATOR);
}
}
}
return strBuilder.toString();
}
private String fixPathPart(String pathPart) {
// We don't support dashes and spaces in file names; only letters, digits and underscores
return pathPart.replaceAll("\\s+", "_").replaceAll("[-]+", "_").replaceAll("[_]+", "_").replaceAll("\\W", "");
}
}
| src/com/intellij/lang/cloudslang/completion/macro/CurrentNamespaceMacro.java | package com.intellij.lang.cloudslang.completion.macro;
import com.intellij.codeInsight.template.Expression;
import com.intellij.codeInsight.template.ExpressionContext;
import com.intellij.codeInsight.template.Macro;
import com.intellij.codeInsight.template.Result;
import com.intellij.codeInsight.template.TemplateContextType;
import com.intellij.codeInsight.template.TextResult;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import org.jetbrains.annotations.NotNull;
import java.io.File;
import java.nio.file.Path;
import static java.nio.file.Paths.get;
import static java.util.Locale.ENGLISH;
import static org.apache.commons.lang3.StringUtils.isNotEmpty;
public class CurrentNamespaceMacro extends Macro {
private static final String DEFAULT_NAMESPACE_TO_USE = "io.cloudslang.content";
private static final String RELATIVE_PATH_TO_PROJECT = "relativePathToProject";
private static final String NAMESPACE_SEPARATOR = ".";
@Override
public String getName() {
return RELATIVE_PATH_TO_PROJECT;
}
@Override
public String getPresentableName() {
return getName();
}
@Override
public Result calculateResult(@NotNull Expression[] params, ExpressionContext context) {
Project project = context.getProject();
if (context.getEditor() == null) {
return new TextResult(DEFAULT_NAMESPACE_TO_USE);
}
PsiFile file = PsiDocumentManager.getInstance(project).getPsiFile(context.getEditor().getDocument());
if ((file == null) || (file.getVirtualFile() == null)) {
return new TextResult(DEFAULT_NAMESPACE_TO_USE);
}
VirtualFile virtualFile = file.getVirtualFile();
String editorFilePath = virtualFile.getPath();
String projectPath = project.getBasePath();
return new TextResult(fixNamespace(projectPath, editorFilePath));
}
@Override
public Result calculateQuickResult(@NotNull Expression[] params, ExpressionContext context) {
return calculateResult(params, context);
}
@Override
public boolean isAcceptableInContext(TemplateContextType context) {
return true;
}
private String fixNamespace(final String projectPath, final String filePath) {
// Exclude file name from namespace value
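// e.g. <project>/My Flows/sub-dir/flow.sl -> my_flows.sub_dir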
Path relativePath = get(projectPath).relativize(get(new File(filePath).getParent()));
int nameCount = relativePath.getNameCount();
if (nameCount <= 0) {
return DEFAULT_NAMESPACE_TO_USE;
}
StringBuilder strBuilder = new StringBuilder(relativePath.toString().length());
int nameCountMinusOne = nameCount - 1;
for (int index = 0; index < nameCount; index++) {
String cleanPart = fixPathPart(relativePath.getName(index).toString());
if (isNotEmpty(cleanPart)) {
strBuilder.append(cleanPart.toLowerCase(ENGLISH)); // namespace should be lowercase
if (index < nameCountMinusOne) {
strBuilder.append(NAMESPACE_SEPARATOR);
}
}
}
return strBuilder.toString();
}
private String fixPathPart(String pathPart) {
// We don't support dashes and spaces in file names; only letters, digits and underscores
return pathPart.replaceAll("\\s+", "_").replaceAll("[-]+", "_").replaceAll("[_]+", "_").replaceAll("\\W", "");
}
}
| Removing space
| src/com/intellij/lang/cloudslang/completion/macro/CurrentNamespaceMacro.java | Removing space | <ide><path>rc/com/intellij/lang/cloudslang/completion/macro/CurrentNamespaceMacro.java
<ide> }
<ide> }
<ide> return strBuilder.toString();
<del>
<ide> }
<ide>
<ide> private String fixPathPart(String pathPart) { |
|
JavaScript | mit | 213f7a1e14ae8cf5b46fffc6ec90af8a48a910a9 | 0 | noslouch/pa,noslouch/pa,noslouch/pa,noslouch/pa,noslouch/pa | /* init.js */
'use strict';
require.config({
paths : {
jquery : 'lib/jquery/jquery',
underscore : 'lib/underscore/underscore-amd.min',
// using extend backbone
backbone : 'utils/backbone',
moment : 'lib/moment/moment.min',
isotope : 'utils/iso',
bbq : 'lib/bbq/jquery.ba-bbq.no-legacy',
foundation : 'lib/foundation/foundation',
tooltips : 'lib/foundation/foundation.tooltips',
fancybox : 'lib/fancybox/jquery.fancybox.pack',
json : 'lib/json/json2'
//'app/router' : 'app/router.1379951484467814434',
//'app/collections/covergallery' : 'app/collections/covergallery.1379951484467814434',
//'app/collections/films' : 'app/collections/films.1379951484467814434',
//'app/collections/instagrams' : 'app/collections/instagrams.1379951484467814434',
//'app/collections/photography' : 'app/collections/photography.1379951484467814434',
//'app/collections/profile' : 'app/collections/profile.1379951484467814434',
//'app/collections/projects' : 'app/collections/projects.1379951484467814434',
//'app/collections/showcases' : 'app/collections/showcases.1379951484467814434',
//'app/models/album' : 'app/models/album.1379951484467814434',
//'app/models/cover' : 'app/models/cover.1379951484467814434',
//'app/models/film' : 'app/models/film.1379951484467814434',
//'app/models/profilesection' : 'app/models/profilesection.1379951484467814434',
//'app/models/project' : 'app/models/project.1379951484467814434',
//'app/models/searchQuery' : 'app/models/searchQuery.1379951484467814434',
//'app/models/showcase' : 'app/models/showcase.1379951484467814434',
//'app/views/chrome' : 'app/views/chrome.1379951484467814434',
//'app/views/filterviews' : 'app/views/filterviews.1379951484467814434',
//'app/views/header' : 'app/views/header.1379951484467814434',
//'app/views/home' : 'app/views/home.1379951484467814434',
//'app/views/page' : 'app/views/page.1379951484467814434',
//'app/views/profileviews' : 'app/views/profileviews.1379951484467814434',
//'app/views/projects' : 'app/views/projects.1379951484467814434',
//'app/views/search' : 'app/views/search.1379951484467814434',
//'app/views/showcaseviews' : 'app/views/showcaseviews.1379951484467814434',
//'app/views/singleviews' : 'app/views/singleviews.1379951484467814434',
//'utils/spinner' : 'utils/spinner.1379951484467814434',
//'tpl/jst' : 'tpl/jst.1379951484467814434'
},
shim : {
'jquery': {
exports: '$'
},
'isotope' : ['jquery'],
'bbq' : {
deps : ['jquery']
},
'foundation' : {
deps : ['jquery']
},
'tooltips' : {
deps : ['jquery', 'foundation']
},
'fancybox' : {
deps : ['jquery']
},
'json' : {
deps : ['jquery']
}
},
waitSeconds : 20
})
require( ['jquery', 'underscore', 'backbone', 'app/router', 'app/views/chrome'],
function( $, _, Backbone, Router ){
Backbone.history.start({ pushState : true, root : '/' })
} )
| js/init.js | /* init.js */
'use strict';
require.config({
paths : {
jquery : 'lib/jquery/jquery',
underscore : 'lib/underscore/underscore-amd.min',
// using extend backbone
backbone : 'utils/backbone',
moment : 'lib/moment/moment.min',
isotope : 'utils/iso',
bbq : 'lib/bbq/jquery.ba-bbq.no-legacy',
foundation : 'lib/foundation/foundation',
tooltips : 'lib/foundation/foundation.tooltips',
fancybox : 'lib/fancybox/jquery.fancybox.pack',
json : 'lib/json/json2',
'app/router' : 'app/router.1379951484467814434',
'app/collections/covergallery' : 'app/collections/covergallery.1379951484467814434',
'app/collections/films' : 'app/collections/films.1379951484467814434',
'app/collections/instagrams' : 'app/collections/instagrams.1379951484467814434',
'app/collections/photography' : 'app/collections/photography.1379951484467814434',
'app/collections/profile' : 'app/collections/profile.1379951484467814434',
'app/collections/projects' : 'app/collections/projects.1379951484467814434',
'app/collections/showcases' : 'app/collections/showcases.1379951484467814434',
'app/models/album' : 'app/models/album.1379951484467814434',
'app/models/cover' : 'app/models/cover.1379951484467814434',
'app/models/film' : 'app/models/film.1379951484467814434',
'app/models/profilesection' : 'app/models/profilesection.1379951484467814434',
'app/models/project' : 'app/models/project.1379951484467814434',
'app/models/searchQuery' : 'app/models/searchQuery.1379951484467814434',
'app/models/showcase' : 'app/models/showcase.1379951484467814434',
'app/views/chrome' : 'app/views/chrome.1379951484467814434',
'app/views/filterviews' : 'app/views/filterviews.1379951484467814434',
'app/views/header' : 'app/views/header.1379951484467814434',
'app/views/home' : 'app/views/home.1379951484467814434',
'app/views/page' : 'app/views/page.1379951484467814434',
'app/views/profileviews' : 'app/views/profileviews.1379951484467814434',
'app/views/projects' : 'app/views/projects.1379951484467814434',
'app/views/search' : 'app/views/search.1379951484467814434',
'app/views/showcaseviews' : 'app/views/showcaseviews.1379951484467814434',
'app/views/singleviews' : 'app/views/singleviews.1379951484467814434',
'utils/spinner' : 'utils/spinner.1379951484467814434',
'tpl/jst' : 'tpl/jst.1379951484467814434'
},
shim : {
'jquery': {
exports: '$'
},
'isotope' : ['jquery'],
'bbq' : {
deps : ['jquery']
},
'foundation' : {
deps : ['jquery']
},
'tooltips' : {
deps : ['jquery', 'foundation']
},
'fancybox' : {
deps : ['jquery']
},
'json' : {
deps : ['jquery']
}
},
waitSeconds : 20
})
require( ['jquery', 'underscore', 'backbone', 'app/router', 'app/views/chrome'],
function( $, _, Backbone, Router ){
Backbone.history.start({ pushState : true, root : '/' })
} )
| removed init.js hash
| js/init.js | removed init.js hash | <ide><path>s/init.js
<ide> foundation : 'lib/foundation/foundation',
<ide> tooltips : 'lib/foundation/foundation.tooltips',
<ide> fancybox : 'lib/fancybox/jquery.fancybox.pack',
<del> json : 'lib/json/json2',
<del> 'app/router' : 'app/router.1379951484467814434',
<del> 'app/collections/covergallery' : 'app/collections/covergallery.1379951484467814434',
<del> 'app/collections/films' : 'app/collections/films.1379951484467814434',
<del> 'app/collections/instagrams' : 'app/collections/instagrams.1379951484467814434',
<del> 'app/collections/photography' : 'app/collections/photography.1379951484467814434',
<del> 'app/collections/profile' : 'app/collections/profile.1379951484467814434',
<del> 'app/collections/projects' : 'app/collections/projects.1379951484467814434',
<del> 'app/collections/showcases' : 'app/collections/showcases.1379951484467814434',
<del> 'app/models/album' : 'app/models/album.1379951484467814434',
<del> 'app/models/cover' : 'app/models/cover.1379951484467814434',
<del> 'app/models/film' : 'app/models/film.1379951484467814434',
<del> 'app/models/profilesection' : 'app/models/profilesection.1379951484467814434',
<del> 'app/models/project' : 'app/models/project.1379951484467814434',
<del> 'app/models/searchQuery' : 'app/models/searchQuery.1379951484467814434',
<del> 'app/models/showcase' : 'app/models/showcase.1379951484467814434',
<del> 'app/views/chrome' : 'app/views/chrome.1379951484467814434',
<del> 'app/views/filterviews' : 'app/views/filterviews.1379951484467814434',
<del> 'app/views/header' : 'app/views/header.1379951484467814434',
<del> 'app/views/home' : 'app/views/home.1379951484467814434',
<del> 'app/views/page' : 'app/views/page.1379951484467814434',
<del> 'app/views/profileviews' : 'app/views/profileviews.1379951484467814434',
<del> 'app/views/projects' : 'app/views/projects.1379951484467814434',
<del> 'app/views/search' : 'app/views/search.1379951484467814434',
<del> 'app/views/showcaseviews' : 'app/views/showcaseviews.1379951484467814434',
<del> 'app/views/singleviews' : 'app/views/singleviews.1379951484467814434',
<del> 'utils/spinner' : 'utils/spinner.1379951484467814434',
<del> 'tpl/jst' : 'tpl/jst.1379951484467814434'
<add> json : 'lib/json/json2'
<add> //'app/router' : 'app/router.1379951484467814434',
<add> //'app/collections/covergallery' : 'app/collections/covergallery.1379951484467814434',
<add> //'app/collections/films' : 'app/collections/films.1379951484467814434',
<add> //'app/collections/instagrams' : 'app/collections/instagrams.1379951484467814434',
<add> //'app/collections/photography' : 'app/collections/photography.1379951484467814434',
<add> //'app/collections/profile' : 'app/collections/profile.1379951484467814434',
<add> //'app/collections/projects' : 'app/collections/projects.1379951484467814434',
<add> //'app/collections/showcases' : 'app/collections/showcases.1379951484467814434',
<add> //'app/models/album' : 'app/models/album.1379951484467814434',
<add> //'app/models/cover' : 'app/models/cover.1379951484467814434',
<add> //'app/models/film' : 'app/models/film.1379951484467814434',
<add> //'app/models/profilesection' : 'app/models/profilesection.1379951484467814434',
<add> //'app/models/project' : 'app/models/project.1379951484467814434',
<add> //'app/models/searchQuery' : 'app/models/searchQuery.1379951484467814434',
<add> //'app/models/showcase' : 'app/models/showcase.1379951484467814434',
<add> //'app/views/chrome' : 'app/views/chrome.1379951484467814434',
<add> //'app/views/filterviews' : 'app/views/filterviews.1379951484467814434',
<add> //'app/views/header' : 'app/views/header.1379951484467814434',
<add> //'app/views/home' : 'app/views/home.1379951484467814434',
<add> //'app/views/page' : 'app/views/page.1379951484467814434',
<add> //'app/views/profileviews' : 'app/views/profileviews.1379951484467814434',
<add> //'app/views/projects' : 'app/views/projects.1379951484467814434',
<add> //'app/views/search' : 'app/views/search.1379951484467814434',
<add> //'app/views/showcaseviews' : 'app/views/showcaseviews.1379951484467814434',
<add> //'app/views/singleviews' : 'app/views/singleviews.1379951484467814434',
<add> //'utils/spinner' : 'utils/spinner.1379951484467814434',
<add> //'tpl/jst' : 'tpl/jst.1379951484467814434'
<ide> },
<ide> shim : {
<ide> 'jquery': { |
|
Java | apache-2.0 | 75f8dea252acfa9eb49a5407521c8199611810d1 | 0 | mrniko/redisson,jackygurui/redisson,redisson/redisson,zhoffice/redisson | /**
* Copyright 2016 Nikita Koksharov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.redisson.client.handler;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import org.redisson.api.RFuture;
import org.redisson.client.RedisClient;
import org.redisson.client.RedisClientConfig;
import org.redisson.client.RedisConnection;
import org.redisson.client.protocol.RedisCommands;
import org.redisson.misc.RPromise;
import org.redisson.misc.RedissonPromise;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.FutureListener;
/**
*
* @author Nikita Koksharov
*
*/
public abstract class BaseConnectionHandler<C extends RedisConnection> extends ChannelInboundHandlerAdapter {
final RedisClient redisClient;
final RPromise<C> connectionPromise = new RedissonPromise<C>();
C connection;
public BaseConnectionHandler(RedisClient redisClient) {
super();
this.redisClient = redisClient;
}
@Override
public void channelRegistered(ChannelHandlerContext ctx) throws Exception {
if (connection == null) {
connection = createConnection(ctx);
}
super.channelRegistered(ctx);
}
abstract C createConnection(ChannelHandlerContext ctx);
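// Issues the configured initialization commands (AUTH, SELECT, CLIENT SETNAME, READONLY)
// and only fires channelActive and completes the connection promise once all of them have
// succeeded; any failure closes the connection and fails the promise instead.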
@Override
public void channelActive(final ChannelHandlerContext ctx) throws Exception {
final AtomicInteger commandsCounter = new AtomicInteger();
List<RFuture<Object>> futures = new ArrayList<RFuture<Object>>();
RedisClientConfig config = redisClient.getConfig();
if (config.getPassword() != null) {
RFuture<Object> future = connection.async(RedisCommands.AUTH, config.getPassword());
futures.add(future);
}
if (config.getDatabase() != 0) {
RFuture<Object> future = connection.async(RedisCommands.SELECT, config.getDatabase());
futures.add(future);
}
if (config.getClientName() != null) {
RFuture<Object> future = connection.async(RedisCommands.CLIENT_SETNAME, config.getClientName());
futures.add(future);
}
if (config.isReadOnly()) {
RFuture<Object> future = connection.async(RedisCommands.READONLY);
futures.add(future);
}
if (futures.isEmpty()) {
ctx.fireChannelActive();
connectionPromise.trySuccess(connection);
return;
}
commandsCounter.set(futures.size());
for (RFuture<Object> future : futures) {
future.addListener(new FutureListener<Object>() {
@Override
public void operationComplete(Future<Object> future) throws Exception {
if (!future.isSuccess()) {
connection.closeAsync();
connectionPromise.tryFailure(future.cause());
return;
}
if (commandsCounter.decrementAndGet() == 0) {
ctx.fireChannelActive();
connectionPromise.trySuccess(connection);
}
}
});
}
}
}
| redisson/src/main/java/org/redisson/client/handler/BaseConnectionHandler.java | /**
* Copyright 2016 Nikita Koksharov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.redisson.client.handler;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import org.redisson.api.RFuture;
import org.redisson.client.RedisClient;
import org.redisson.client.RedisClientConfig;
import org.redisson.client.RedisConnection;
import org.redisson.client.protocol.RedisCommands;
import org.redisson.misc.RPromise;
import org.redisson.misc.RedissonPromise;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.FutureListener;
/**
*
* @author Nikita Koksharov
*
*/
public abstract class BaseConnectionHandler<C extends RedisConnection> extends ChannelInboundHandlerAdapter {
final RedisClient redisClient;
final RPromise<C> connectionPromise = new RedissonPromise<C>();
C connection;
public BaseConnectionHandler(RedisClient redisClient) {
super();
this.redisClient = redisClient;
}
@Override
public void channelRegistered(ChannelHandlerContext ctx) throws Exception {
if (connection == null) {
connection = createConnection(ctx);
}
super.channelRegistered(ctx);
}
abstract C createConnection(ChannelHandlerContext ctx);
@Override
public void channelActive(final ChannelHandlerContext ctx) throws Exception {
final AtomicInteger commandsCounter = new AtomicInteger();
List<RFuture<Object>> futures = new ArrayList<RFuture<Object>>();
RedisClientConfig config = redisClient.getConfig();
if (config.getPassword() != null) {
RFuture<Object> future = connection.async(RedisCommands.AUTH, config.getPassword());
futures.add(future);
}
if (config.getDatabase() != 0) {
RFuture<Object> future = connection.async(RedisCommands.SELECT, config.getDatabase());
futures.add(future);
}
if (config.getClientName() != null) {
RFuture<Object> future = connection.async(RedisCommands.CLIENT_SETNAME, config.getClientName());
futures.add(future);
}
if (config.isReadOnly()) {
RFuture<Object> future = connection.async(RedisCommands.READONLY);
futures.add(future);
}
if (futures.isEmpty()) {
connectionPromise.trySuccess(connection);
return;
}
commandsCounter.set(futures.size());
for (RFuture<Object> future : futures) {
future.addListener(new FutureListener<Object>() {
@Override
public void operationComplete(Future<Object> future) throws Exception {
if (!future.isSuccess()) {
connection.closeAsync();
connectionPromise.tryFailure(future.cause());
return;
}
if (commandsCounter.decrementAndGet() == 0) {
BaseConnectionHandler.super.channelActive(ctx);
connectionPromise.trySuccess(connection);
}
}
});
}
}
}
| fireChannelActive should be invoked on successful connection establishment
| redisson/src/main/java/org/redisson/client/handler/BaseConnectionHandler.java | fireChannelActive should be invoked on successful connection establishment | <ide><path>edisson/src/main/java/org/redisson/client/handler/BaseConnectionHandler.java
<ide> }
<ide>
<ide> if (futures.isEmpty()) {
<add> ctx.fireChannelActive();
<ide> connectionPromise.trySuccess(connection);
<ide> return;
<ide> }
<ide> return;
<ide> }
<ide> if (commandsCounter.decrementAndGet() == 0) {
<del> BaseConnectionHandler.super.channelActive(ctx);
<add> ctx.fireChannelActive();
<ide> connectionPromise.trySuccess(connection);
<ide> }
<ide> } |
|
JavaScript | mit | 8ef1ee4e372b00b154764558fedf6cea21664b38 | 0 | apbodnar/FSPT,apbodnar/FSPT | loadAll(['mesh/bunny.obj'], function(hash){
var t1 = performance.now();
var triangles = parseMesh(hash['mesh/bunny.obj']);
//triangles = triangles.concat(parseMesh(hash['mesh/bunny.obj']));
var bvh = new BVH(triangles, 4);
console.log(bvh.serializeTree());
console.log(performance.now() - t1);
runTest(bvh);
console.log(bvh)
});
function runTest(bvh){
var canvas = document.getElementById('trace');
canvas.width = canvas.height = 400;
var ctx = canvas.getContext('2d');
var t1;
// t1 = performance.now();
// naiveTrace(canvas, ctx, root);
// console.log(performance.now() - t1);
t1 = performance.now();
//treeTrace(canvas, ctx, bvh.root);
//arrayTreeTrace(canvas, ctx, bvh.serializeTree());
stacklessTreeTrace(canvas, ctx, bvh.serializeTree());
console.log(performance.now() - t1);
}
function drawPixels(canvas, ctx, algorithm){
for(var i=0; i<canvas.width; i++){
for(var j=0; j<canvas.height; j++){
var shift = [0, 0.25, 0.75];
var origin = [0, 0, -1];
var halfWidth = canvas.width/2;
var halfHeight = canvas.height/2;
var light = [0, 5, -5];
var dir = normalize(sub([ (i/halfWidth-1), -(j/halfHeight - 1), 0], origin));
origin = add(origin, shift);
var ray = new Ray(origin, dir);
var res = algorithm(ray);
if(res[0] < Infinity && res[0] > 0){
origin = add(origin, scale(dir, res[0]));
dir = normalize(sub(light, origin));
ray = new Ray(origin, dir);
var shadow = algorithm(ray);
var color = getColor(res[1], shadow, dir, light, origin);
ctx.fillStyle = "rgb("+color[0]+","+color[1]+","+color[2]+")";
} else {
ctx.fillStyle = "#ffffff";
}
ctx.fillRect( i, j, 1, 1 );
ctx.fill()
}
}
}
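// Stackless BVH traversal: instead of keeping an explicit node stack, the walk remembers
// where it came from (parent, sibling or child) and uses the parent/sibling links stored
// in the serialized tree to pick the next node, always visiting the near child first.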
function stacklessTreeTrace(canvas, ctx, array){
var fromSibling = 0,
fromChild = 1,
fromParent = 2;
var rootIdx = 0;
var res = [Infinity, null];
function orderedChildren(ray, nodeIdx){
var node = array[nodeIdx].node,
left = array[nodeIdx].left,
right = array[nodeIdx].right;
if(ray.dir[node.split] > 0){
return {near: left, far: right}
} else {
return {near: right, far: left}
}
}
function traverse(ray){
var res = [Infinity, null];
var state = fromParent;
var current = orderedChildren(ray, rootIdx).near;
while(true){
var fromArray = array[current]
var node = fromArray.node;
var ordered = orderedChildren(ray, current);
switch(state){
case fromChild:
if(current == rootIdx){
return res;
}
var parentOrdered = orderedChildren(ray, fromArray.parent)
if(current == parentOrdered.near){
current = fromArray.sibling;
state = fromSibling;
} else {
current = fromArray.parent
state = fromChild;
}
break;
case fromSibling:
var test = rayBoxIntersect(ray, node.boundingBox)
if(test == Infinity){
current = fromArray.parent;
state = fromChild;
} else if (node.leaf) {
processed = processLeaf(ray, node);
if(processed[0] < res[0]){
res = processed;
}
current = fromArray.parent;
state = fromChild;
} else {
current = ordered.near
state = fromParent;
}
break;
case fromParent:
var test = rayBoxIntersect(ray, node.boundingBox)
if(test == Infinity){
current = fromArray.sibling;
state = fromSibling;
} else if(node.leaf){
processed = processLeaf(ray, node);
if(processed[0] < res[0]){
res = processed;
}
current = fromArray.sibling;
state = fromSibling;
} else {
current = ordered.near;
state = fromParent;
}
break;
}
}
}
drawPixels(canvas, ctx, traverse)
}
function arrayTreeTrace(canvas, ctx, array){
var algorithm = function (ray) {
return findTrianglesFlat(ray, array, 0)
};
drawPixels(canvas, ctx, algorithm)
}
function naiveTrace(canvas, ctx, root){
var algorithm = function(ray){
var hit = Infinity,
tri = null;
for(var k=0; k<root.triangles.length; k++){
var res = rayTriangleIntersect(ray, root.triangles[k]);
if(res[0] < hit){
hit = res[0];
tri = res[1];
}
}
return [hit, tri]
};
drawPixels(canvas, ctx, algorithm)
}
function treeTrace(canvas, ctx, root){
var algorithm = function(ray){
return findTriangles(ray, root);
};
drawPixels(canvas, ctx, algorithm)
}
function closestNode(ray, nLeft, nRight){
var tLeft = rayBoxIntersect(ray, nLeft.boundingBox);
var tRight = rayBoxIntersect(ray, nRight.boundingBox);
var left = tLeft < Infinity ? nLeft : null;
var right = tRight < Infinity ? nRight : null;
if(tLeft < tRight){
return [left, right, tLeft, tRight]
}
return [right, left, tRight, tLeft]
}
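// Recursive traversal of the array-backed tree: descend into the nearer child first and
// skip the farther child when its entry distance already exceeds the closest hit so far.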
function findTrianglesFlat(ray, array, i){
var root = array[i].node;
if(root.leaf){
var res = [Infinity, null]
for(var i=0; i<root.triangles.length; i++){
var tmp = rayTriangleIntersect(ray, root.triangles[i])
if(tmp[0] < res[0]){
res = tmp;
}
}
return res;
}
var left = array[array[i].left].node;
var right = array[array[i].right].node;
left.idx = array[i].left;
right.idx = array[i].right;
var ord = closestNode(ray, left, right);
var closest = [Infinity, null];
for(var i=0 ; i<ord.length; i++){
if(ord[i] && ord[i+2] < closest[0]){
var res = findTrianglesFlat(ray, array, ord[i].idx);
if(res[0] < closest[0]){
closest = res;
}
}
}
return closest;
}
function findTriangles(ray, root){
if(root.leaf){
return processLeaf(ray, root);
}
var ord = closestNode(ray, root.left, root.right);
var closest = [Infinity, null];
for(var i=0 ; i<ord.length; i++){
if(ord[i] && ord[i+2] < closest[0]){
var res = findTriangles(ray, ord[i]);
if(res[0] < closest[0]){
closest = res;
}
}
}
return closest;
}
function getColor(tri, shadow, dir, light, origin){
var norm = normalize(cross(sub(tri.v2, tri.v1), sub(tri.v3, tri.v1)));
var shade = Math.max(dot(dir, norm),0.2);
if(shadow[0] < magnitude(sub(light, origin))){
shade = 0.2;
}
var c = scale([255,255,255], shade);
return c.map(Math.floor)
}
function Ray(origin, dir){
this.origin = origin;
this.dir = dir
}
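// Slab test: clip the ray against each axis-aligned pair of box planes and keep the
// overlapping [tmin, tmax] interval; the box is hit when that interval is non-empty.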
function rayBoxIntersect(ray, bbox){
var invDir = inverse(ray.dir),
tmin = -Infinity,
tmax = Infinity,
box = bbox.getBounds(),
tx1 = (box[0] - ray.origin[0]) * invDir[0],
tx2 = (box[1] - ray.origin[0]) * invDir[0],
ty1 = (box[2] - ray.origin[1]) * invDir[1],
ty2 = (box[3] - ray.origin[1]) * invDir[1],
tz1 = (box[4] - ray.origin[2]) * invDir[2],
tz2 = (box[5] - ray.origin[2]) * invDir[2];
tmin = Math.max(tmin, Math.min(tx1, tx2));
tmax = Math.min(tmax, Math.max(tx1, tx2));
tmin = Math.max(tmin, Math.min(ty1, ty2));
tmax = Math.min(tmax, Math.max(ty1, ty2));
tmin = Math.max(tmin, Math.min(tz1, tz2));
tmax = Math.min(tmax, Math.max(tz1, tz2));
if (tmax >= tmin){
return tmin;
} else {
return Infinity;
}
}
function processLeaf(ray, root){
var res = [Infinity, null];
for(var i=0; i<root.triangles.length; i++){
var tmp = rayTriangleIntersect(ray, root.triangles[i])
if(tmp[0] < res[0]){
res = tmp;
}
}
return res;
}
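// Moller-Trumbore ray/triangle intersection: solves for the barycentric coordinates (u, v)
// and the ray parameter t directly, without precomputing the triangle's plane equation.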
function rayTriangleIntersect(ray, tri){
var epsilon = 0.000000000001;
var e1 = sub(tri.v2, tri.v1);
var e2 = sub(tri.v3, tri.v1);
var p = cross(ray.dir, e2);
var det = dot(e1, p);
if(det > -epsilon && det < epsilon){return [Infinity, null]}
var invDet = 1.0 / det;
var t = sub(ray.origin, tri.v1);
var u = dot(t, p) * invDet;
if(u < 0 || u > 1){return [Infinity, null]}
var q = cross(t, e1);
var v = dot(ray.dir, q) * invDet;
if(v < 0 || u + v > 1){return [Infinity, null]}
t = dot(e2, q) * invDet;
if(t > epsilon){
return [t, tri];
}
return [Infinity, null];
}
| test_tree.js | loadAll(['mesh/bunny.obj'], function(hash){
var t1 = performance.now();
var triangles = parseMesh(hash['mesh/bunny.obj']);
//triangles = triangles.concat(parseMesh(hash['mesh/bunny.obj']));
var bvh = new BVH(triangles, 4);
console.log(bvh.serializeTree());
console.log(performance.now() - t1);
runTest(bvh);
console.log(bvh)
});
function runTest(bvh){
var canvas = document.getElementById('trace');
canvas.width = canvas.height = 400;
var ctx = canvas.getContext('2d');
var t1;
// t1 = performance.now();
// naiveTrace(canvas, ctx, root);
// console.log(performance.now() - t1);
t1 = performance.now();
//treeTrace(canvas, ctx, bvh.root);
arrayTreeTrace(canvas, ctx, bvh.serializeTree());
console.log(performance.now() - t1);
}
function drawPixels(canvas, ctx, algorithm){
for(var i=0; i<canvas.width; i++){
for(var j=0; j<canvas.height; j++){
var shift = [0, 0.25, 0.75];
var origin = [0, 0, -1];
var halfWidth = canvas.width/2;
var halfHeight = canvas.height/2;
var light = [0, 5, -5];
var dir = normalize(sub([ (i/halfWidth-1), -(j/halfHeight - 1), 0], origin));
origin = add(origin, shift);
var ray = new Ray(origin, dir);
var res = algorithm(ray);
if(res[0] < Infinity && res[0] > 0){
origin = add(origin, scale(dir, res[0]));
dir = normalize(sub(light, origin));
ray = new Ray(origin, dir);
var shadow = algorithm(ray);
var color = getColor(res[1], shadow, dir, light, origin);
ctx.fillStyle = "rgb("+color[0]+","+color[1]+","+color[2]+")";
} else {
ctx.fillStyle = "#ffffff";
}
ctx.fillRect( i, j, 1, 1 );
ctx.fill()
}
}
}
function stacklessTreeTrace(canvas, ctx, array){
var fromSibling = 0,
fromChild = 1,
fromParent = 2;
var rootIdx = 0;
var res = [Infinity, null];
function orderedChildren(ray, split, rootIdx){
var root = array[rootIdx].node,
left = root.left,
right = root.right;
if(ray.dir[split] > 0){
return {near: left, far: right}
} else {
return {near: right, far: left}
}
}
function traverse(ray){
var state = fromParent;
var current = orderedChildren(ray, array[rootNode].node.split, rootIdx).near;
while(true){
var node = array[current].node;
var ordered = orderedChildren(ray, node.split, current);
switch(state){
case fromChild:
if(current == rootNode){
return res;
}
var parentOrdered =
if(current == array[ordered.near]){
}
break;
case fromSibling:
break;
case fromParent:
break;
}
}
}
}
function arrayTreeTrace(canvas, ctx, array){
var algorithm = function (ray) {
return findTrianglesFlat(ray, array, 0)
};
drawPixels(canvas, ctx, algorithm)
}
function naiveTrace(canvas, ctx, root){
var algorithm = function(ray){
var hit = Infinity,
tri = null;
for(var k=0; k<root.triangles.length; k++){
var res = rayTriangleIntersect(ray, root.triangles[k]);
if(res[0] < hit){
hit = res[0];
tri = res[1];
}
}
return [hit, tri]
};
drawPixels(canvas, ctx, algorithm)
}
function treeTrace(canvas, ctx, root){
var algorithm = function(ray){
return findTriangles(ray, root);
};
drawPixels(canvas, ctx, algorithm)
}
function closestNode(ray, nLeft, nRight){
var tLeft = rayBoxIntersect(ray, nLeft.boundingBox);
var tRight = rayBoxIntersect(ray, nRight.boundingBox);
var left = tLeft < Infinity ? nLeft : null;
var right = tRight < Infinity ? nRight : null;
if(tLeft < tRight){
return [left, right, tLeft, tRight]
}
return [right, left, tRight, tLeft]
}
function findTrianglesFlat(ray, array, i){
var root = array[i].node;
if(root.leaf){
var res = [Infinity, null]
for(var i=0; i<root.triangles.length; i++){
var tmp = rayTriangleIntersect(ray, root.triangles[i])
if(tmp[0] < res[0]){
res = tmp;
}
}
return res;
}
var left = array[array[i].left].node;
var right = array[array[i].right].node;
left.idx = array[i].left;
right.idx = array[i].right;
var ord = closestNode(ray, left, right);
var closest = [Infinity, null];
for(var i=0 ; i<ord.length; i++){
if(ord[i] && ord[i+2] < closest[0]){
var res = findTrianglesFlat(ray, array, ord[i].idx);
if(res[0] < closest[0]){
closest = res;
}
}
}
return closest;
}
function findTriangles(ray, root){
if(root.leaf){
return processLeaf(ray, root);
}
var ord = closestNode(ray, root.left, root.right);
var closest = [Infinity, null];
for(var i=0 ; i<ord.length; i++){
if(ord[i] && ord[i+2] < closest[0]){
var res = findTriangles(ray, ord[i]);
if(res[0] < closest[0]){
closest = res;
}
}
}
return closest;
}
function getColor(tri, shadow, dir, light, origin){
var norm = normalize(cross(sub(tri.v2, tri.v1), sub(tri.v3, tri.v1)));
var shade = Math.max(dot(dir, norm),0.2);
if(shadow[0] < magnitude(sub(light, origin))){
shade = 0.2;
}
var c = scale([255,255,255], shade);
return c.map(Math.floor)
}
function Ray(origin, dir){
this.origin = origin;
this.dir = dir
}
function rayBoxIntersect(ray, bbox){
var invDir = inverse(ray.dir),
tmin = -Infinity,
tmax = Infinity,
box = bbox.getBounds(),
tx1 = (box[0] - ray.origin[0]) * invDir[0],
tx2 = (box[1] - ray.origin[0]) * invDir[0],
ty1 = (box[2] - ray.origin[1]) * invDir[1],
ty2 = (box[3] - ray.origin[1]) * invDir[1],
tz1 = (box[4] - ray.origin[2]) * invDir[2],
tz2 = (box[5] - ray.origin[2]) * invDir[2];
tmin = Math.max(tmin, Math.min(tx1, tx2));
tmax = Math.min(tmax, Math.max(tx1, tx2));
tmin = Math.max(tmin, Math.min(ty1, ty2));
tmax = Math.min(tmax, Math.max(ty1, ty2));
tmin = Math.max(tmin, Math.min(tz1, tz2));
tmax = Math.min(tmax, Math.max(tz1, tz2));
if (tmax >= tmin){
return tmin;
} else {
return Infinity;
}
}
function processLeaf(ray, root){
var res = [Infinity, null];
for(var i=0; i<root.triangles.length; i++){
var tmp = rayTriangleIntersect(ray, root.triangles[i])
if(tmp[0] < res[0]){
res = tmp;
}
}
return res;
}
function rayTriangleIntersect(ray, tri){
var epsilon = 0.000000000001;
var e1 = sub(tri.v2, tri.v1);
var e2 = sub(tri.v3, tri.v1);
var p = cross(ray.dir, e2);
var det = dot(e1, p);
if(det > -epsilon && det < epsilon){return [Infinity, null]}
var invDet = 1.0 / det;
var t = sub(ray.origin, tri.v1);
var u = dot(t, p) * invDet;
if(u < 0 || u > 1){return [Infinity, null]}
var q = cross(t, e1);
var v = dot(ray.dir, q) * invDet;
if(v < 0 || u + v > 1){return [Infinity, null]}
t = dot(e2, q) * invDet;
if(t > epsilon){
return [t, tri];
}
return [Infinity, null];
}
| software stackless traversal
| test_tree.js | software stackless traversal | <ide><path>est_tree.js
<ide> // console.log(performance.now() - t1);
<ide> t1 = performance.now();
<ide> //treeTrace(canvas, ctx, bvh.root);
<del> arrayTreeTrace(canvas, ctx, bvh.serializeTree());
<add> //arrayTreeTrace(canvas, ctx, bvh.serializeTree());
<add> stacklessTreeTrace(canvas, ctx, bvh.serializeTree());
<ide> console.log(performance.now() - t1);
<ide> }
<ide>
<ide> var rootIdx = 0;
<ide> var res = [Infinity, null];
<ide>
<del> function orderedChildren(ray, split, rootIdx){
<del> var root = array[rootIdx].node,
<del> left = root.left,
<del> right = root.right;
<del>
<del> if(ray.dir[split] > 0){
<add> function orderedChildren(ray, nodeIdx){
<add> var node = array[nodeIdx].node,
<add> left = array[nodeIdx].left,
<add> right = array[nodeIdx].right;
<add>
<add> if(ray.dir[node.split] > 0){
<ide> return {near: left, far: right}
<ide> } else {
<ide> return {near: right, far: left}
<ide> }
<ide>
<ide> function traverse(ray){
<add> var res = [Infinity, null];
<ide> var state = fromParent;
<del> var current = orderedChildren(ray, array[rootNode].node.split, rootIdx).near;
<add> var current = orderedChildren(ray, rootIdx).near;
<ide> while(true){
<del> var node = array[current].node;
<del> var ordered = orderedChildren(ray, node.split, current);
<add> var fromArray = array[current]
<add> var node = fromArray.node;
<add> var ordered = orderedChildren(ray, current);
<ide> switch(state){
<ide> case fromChild:
<del> if(current == rootNode){
<add> if(current == rootIdx){
<ide> return res;
<ide> }
<del> var parentOrdered =
<del> if(current == array[ordered.near]){
<del>
<add> var parentOrdered = orderedChildren(ray, fromArray.parent)
<add> if(current == parentOrdered.near){
<add> current = fromArray.sibling;
<add> state = fromSibling;
<add> } else {
<add> current = fromArray.parent
<add> state = fromChild;
<ide> }
<ide> break;
<ide> case fromSibling:
<add> var test = rayBoxIntersect(ray, node.boundingBox)
<add> if(test == Infinity){
<add> current = fromArray.parent;
<add> state = fromChild;
<add> } else if (node.leaf) {
<add> processed = processLeaf(ray, node);
<add> if(processed[0] < res[0]){
<add> res = processed;
<add> }
<add> current = fromArray.parent;
<add> state = fromChild;
<add> } else {
<add> current = ordered.near
<add> state = fromParent;
<add> }
<ide> break;
<ide> case fromParent:
<add> var test = rayBoxIntersect(ray, node.boundingBox)
<add> if(test == Infinity){
<add> current = fromArray.sibling;
<add> state = fromSibling;
<add> } else if(node.leaf){
<add> processed = processLeaf(ray, node);
<add> if(processed[0] < res[0]){
<add> res = processed;
<add> }
<add> current = fromArray.sibling;
<add> state = fromSibling;
<add> } else {
<add> current = ordered.near;
<add> state = fromParent;
<add> }
<ide> break;
<ide> }
<ide> }
<ide> }
<add> drawPixels(canvas, ctx, traverse)
<ide> }
<ide>
<ide> function arrayTreeTrace(canvas, ctx, array){ |
|
Java | agpl-3.0 | 2351849e0108b69c3f8ad551c8a5f3574271f8c0 | 0 | flamingo-geocms/flamingo,flamingo-geocms/flamingo,flamingo-geocms/flamingo,B3Partners/flamingo,B3Partners/flamingo,B3Partners/flamingo,B3Partners/flamingo,flamingo-geocms/flamingo | /*
* Copyright (C) 2012-2013 B3Partners B.V.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package nl.b3p.viewer.stripes;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.io.WKTReader;
import java.io.IOException;
import net.sourceforge.stripes.action.ActionBean;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import javax.persistence.EntityManager;
import javax.servlet.http.HttpServletRequest;
import net.sourceforge.stripes.action.*;
import net.sourceforge.stripes.validation.Validate;
import nl.b3p.viewer.config.app.Application;
import nl.b3p.viewer.config.app.ApplicationLayer;
import nl.b3p.viewer.config.security.Authorizations;
import nl.b3p.viewer.config.services.FeatureTypeRelation;
import nl.b3p.viewer.config.services.Layer;
import nl.b3p.viewer.config.services.SimpleFeatureType;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.geotools.data.DataUtilities;
import org.geotools.data.DefaultTransaction;
import org.geotools.data.FeatureSource;
import org.geotools.data.Transaction;
import org.geotools.data.simple.SimpleFeatureStore;
import org.geotools.factory.CommonFactoryFinder;
import org.geotools.filter.identity.FeatureIdImpl;
import org.geotools.filter.text.cql2.CQL;
import org.json.JSONException;
import org.json.JSONObject;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.type.AttributeDescriptor;
import org.opengis.feature.type.GeometryType;
import org.opengis.filter.Filter;
import org.opengis.filter.FilterFactory2;
import org.opengis.filter.identity.FeatureId;
import org.stripesstuff.stripersist.Stripersist;
/**
*
* @author Matthijs Laan
*/
@UrlBinding("/action/feature/edit")
@StrictBinding
public class EditFeatureActionBean implements ActionBean {
private static final Log log = LogFactory.getLog(EditFeatureActionBean.class);
private static final String FID = FeatureInfoActionBean.FID;
private ActionBeanContext context;
@Validate
private Application application;
@Validate
private String feature;
@Validate
private ApplicationLayer appLayer;
private Layer layer;
private SimpleFeatureStore store;
private JSONObject jsonFeature;
//<editor-fold defaultstate="collapsed" desc="getters and setters">
@Override
public ActionBeanContext getContext() {
return context;
}
@Override
public void setContext(ActionBeanContext context) {
this.context = context;
}
public Application getApplication() {
return application;
}
public void setApplication(Application application) {
this.application = application;
}
public String getFeature() {
return feature;
}
public void setFeature(String feature) {
this.feature = feature;
}
public ApplicationLayer getAppLayer() {
return appLayer;
}
public void setAppLayer(ApplicationLayer appLayer) {
this.appLayer = appLayer;
}
public SimpleFeatureStore getStore() {
return store;
}
public JSONObject getJsonFeature() {
return jsonFeature;
}
public Layer getLayer() {
return layer;
}
public String getFID() {
return FID;
}
//</editor-fold>
@DefaultHandler
public Resolution edit() throws JSONException {
JSONObject json = new JSONObject();
json.put("success", Boolean.FALSE);
String error = null;
FeatureSource fs = null;
EntityManager em = Stripersist.getEntityManager();
try {
do {
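// Single-pass validation block: any failed check breaks out to the shared error handling below.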
if(appLayer == null) {
error = "App layer or service not found";
break;
}
if(!Authorizations.isAppLayerWriteAuthorized(application, appLayer, context.getRequest(), em)) {
error = "U heeft geen rechten om deze kaartlaag te bewerken";
break;
}
layer = appLayer.getService().getLayer(appLayer.getLayerName(), em);
if(layer == null) {
error = "Layer not found";
break;
}
if(layer.getFeatureType() == null) {
error ="No feature type";
break;
}
fs = layer.getFeatureType().openGeoToolsFeatureSource();
if(!(fs instanceof SimpleFeatureStore)) {
error = "Feature source does not support editing";
break;
}
store = (SimpleFeatureStore)fs;
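// Record the application user in the datastore logs before any change is made (see addAuditTrailLog).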
addAuditTrailLog();
jsonFeature = new JSONObject(feature);
if (!this.isFeatureWriteAuthorized(appLayer,jsonFeature,context.getRequest())){
error = "U heeft geen rechten om deze feature toe te voegen, te verwijderen en/of te wijzigen";
break;
}
String fid = jsonFeature.optString(FID, null);
if(fid == null) {
json.put(FID, addNewFeature());
} else {
editFeature(fid);
json.put(FID, fid);
}
json.put("success", Boolean.TRUE);
} while(false);
} catch(Exception e) {
log.error(String.format("Exception editing feature", e));
error = e.toString();
if(e.getCause() != null) {
error += "; cause: " + e.getCause().toString();
}
} finally {
if(fs != null) {
fs.getDataStore().dispose();
}
}
if(error != null) {
json.put("error", error);
log.error("Returned error message editing feature: " + error);
}
return new StreamingResolution("application/json", new StringReader(json.toString(4)));
}
public Resolution saveRelatedFeatures() throws JSONException {
JSONObject json = new JSONObject();
json.put("success", Boolean.FALSE);
String error = null;
FeatureSource fs = null;
EntityManager em = Stripersist.getEntityManager();
if (appLayer == null) {
error = "App layer or service not found";
} else if (!Authorizations.isAppLayerWriteAuthorized(application, appLayer, context.getRequest(), em)) {
error = "U heeft geen rechten om deze kaartlaag te bewerken";
}
if (error != null) {
json.put("error", error);
return new StreamingResolution("application/json", new StringReader(json.toString(4)));
}
layer = appLayer.getService().getLayer(appLayer.getLayerName(), em);
if (layer.getFeatureType().hasRelations()) {
String label;
for (FeatureTypeRelation rel : layer.getFeatureType().getRelations()) {
if (rel.getType().equals(FeatureTypeRelation.RELATE)) {
try {
SimpleFeatureType fType = rel.getForeignFeatureType();
label = fType.getDescription() == null ? fType.getTypeName() : fType.getDescription();
fs = fType.openGeoToolsFeatureSource(5000);
store = (SimpleFeatureStore) fs;
jsonFeature = new JSONObject(feature);
String fid = jsonFeature.optString(FID, null);
if (fid == null || fid.equals("")) {
json.put(FID, addNewFeature());
} else {
jsonFeature.remove("rel_id");
//editFeature(fid);
Transaction transaction = new DefaultTransaction("edit");
store.setTransaction(transaction);
FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2();
Filter filter = ff.id(new FeatureIdImpl(fid));
List<String> attributes = new ArrayList<String>();
List values = new ArrayList();
for (Iterator<String> it = jsonFeature.keys(); it.hasNext();) {
String attribute = it.next();
if (!FID.equals(attribute)) {
AttributeDescriptor ad = store.getSchema().getDescriptor(attribute);
if (ad != null) {
attributes.add(attribute);
//System.out.println(attribute);
String v = jsonFeature.getString(attribute);
//System.out.println(v);
values.add(StringUtils.defaultIfBlank(v, null));
}
}
}
log.debug(String.format("Modifying feature source #%d fid=%s, attributes=%s, values=%s",
layer.getFeatureType().getId(),
fid,
attributes.toString(),
values.toString()));
try {
store.modifyFeatures(attributes.toArray(new String[]{}), values.toArray(), filter);
transaction.commit();
} catch (Exception e) {
transaction.rollback();
throw e;
} finally {
transaction.close();
}
json.put(FID, fid);
}
json.put("success", Boolean.TRUE);
} catch (Exception ex) {
log.error(String.format("cannot save relatedFeature Exception: ",ex));
}
}
}
if (fs != null) {
fs.getDataStore().dispose();
}
}
return new StreamingResolution("application/json", new StringReader(json.toString(4)));
}
public Resolution delete() throws JSONException {
JSONObject json = new JSONObject();
json.put("success", Boolean.FALSE);
String error = null;
FeatureSource fs = null;
EntityManager em = Stripersist.getEntityManager();
try {
do {
if(appLayer == null) {
error = "App layer or service not found";
break;
}
if(!Authorizations.isAppLayerWriteAuthorized(application, appLayer, context.getRequest(), em)) {
error = "U heeft geen rechten om deze kaartlaag te bewerken";
break;
}
layer = appLayer.getService().getLayer(appLayer.getLayerName(), em);
if(layer == null) {
error = "Layer not found";
break;
}
if (!Authorizations.isLayerGeomWriteAuthorized(layer, context.getRequest(), em)) {
error = "U heeft geen rechten om de geometrie van deze kaartlaag te bewerken";
break;
}
if(layer.getFeatureType() == null) {
error ="No feature type";
break;
}
fs = layer.getFeatureType().openGeoToolsFeatureSource();
if(!(fs instanceof SimpleFeatureStore)) {
error = "Feature source does not support editing";
break;
}
store = (SimpleFeatureStore)fs;
jsonFeature = new JSONObject(feature);
if (!this.isFeatureWriteAuthorized(appLayer,jsonFeature,context.getRequest())){
error = "U heeft geen rechten om deze feature toe te voegen en/of te wijzigen";
break;
}
String fid = jsonFeature.optString(FID, null);
if(fid == null) {
error = "Feature without FID can't be deleted";
break;
} else {
deleteFeature(fid);
}
json.put("success", Boolean.TRUE);
} while(false);
} catch(Exception e) {
log.error(String.format("Exception editing feature", e));
error = e.toString();
if(e.getCause() != null) {
error += "; cause: " + e.getCause().toString();
}
} finally {
if(fs != null) {
fs.getDataStore().dispose();
}
}
if(error != null) {
json.put("error", error);
log.error("Returned error message editing feature: " + error);
}
return new StreamingResolution("application/json", new StringReader(json.toString(4)));
}
protected String addNewFeature() throws Exception {
SimpleFeature f = DataUtilities.template(store.getSchema());
Transaction transaction = new DefaultTransaction("create");
store.setTransaction(transaction);
for(AttributeDescriptor ad: store.getSchema().getAttributeDescriptors()) {
if(ad.getType() instanceof GeometryType) {
String wkt = jsonFeature.optString(ad.getLocalName(), null);
Geometry g = null;
if(wkt != null) {
g = new WKTReader().read(wkt);
}
f.setDefaultGeometry(g);
} else {
String v = jsonFeature.optString(ad.getLocalName());
f.setAttribute(ad.getLocalName(), StringUtils.defaultIfBlank(v, null));
}
}
log.debug(String.format("Creating new feature in feature source source #%d: %s",
layer.getFeatureType().getId(),
f.toString()));
try {
List<FeatureId> ids = store.addFeatures(DataUtilities.collection(f));
transaction.commit();
return ids.get(0).getID();
} catch (Exception e) {
transaction.rollback();
throw e;
} finally {
transaction.close();
}
}
protected void deleteFeature(String fid) throws IOException, Exception {
Transaction transaction = new DefaultTransaction("edit");
store.setTransaction(transaction);
FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2();
Filter filter = ff.id(new FeatureIdImpl(fid));
try {
store.removeFeatures(filter);
transaction.commit();
} catch (Exception e) {
transaction.rollback();
throw e;
} finally {
transaction.close();
}
}
protected void editFeature(String fid) throws Exception {
Transaction transaction = new DefaultTransaction("edit");
store.setTransaction(transaction);
FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2();
Filter filter = ff.id(new FeatureIdImpl(fid));
List<String> attributes = new ArrayList<String>();
List values = new ArrayList();
for(Iterator<String> it = jsonFeature.keys(); it.hasNext();) {
String attribute = it.next();
if(!FID.equals(attribute)) {
AttributeDescriptor ad = store.getSchema().getDescriptor(attribute);
if (ad != null) {
if (!isAttributeUserEditingDisabled(attribute)) {
attributes.add(attribute);
if (ad.getType() instanceof GeometryType) {
String wkt = jsonFeature.getString(ad.getLocalName());
Geometry g = null;
if (wkt != null) {
g = new WKTReader().read(wkt);
}
values.add(g);
} else {
String v = jsonFeature.getString(attribute);
values.add(StringUtils.defaultIfBlank(v, null));
}
} else {
log.info(String.format("Attribute \"%s\" not user editable; ignoring", attribute));
}
} else {
log.warn(String.format("Attribute \"%s\" not in feature type; ignoring", attribute));
}
}
}
log.debug(String.format("Modifying feature source #%d fid=%s, attributes=%s, values=%s",
layer.getFeatureType().getId(),
fid,
attributes.toString(),
values.toString()));
try {
store.modifyFeatures(attributes.toArray(new String[] {}), values.toArray(), filter);
transaction.commit();
} catch (Exception e) {
transaction.rollback();
throw e;
} finally {
transaction.close();
}
}
/**
* Check whether the {@code disableUserEdit} flag is set on the attribute.
*
* @param attrName attribute to check
* @return {@code true} when the configured attribute is flagged as
* "readOnly"
*/
protected boolean isAttributeUserEditingDisabled(String attrName) {
return this.getAppLayer().getAttribute(this.getLayer().getFeatureType(), attrName).isDisableUserEdit();
}
private boolean isFeatureWriteAuthorized(ApplicationLayer appLayer, JSONObject jsonFeature, HttpServletRequest request) {
if (appLayer.getDetails() != null && appLayer.getDetails().containsKey("editfeature.usernameAttribute")) {
String attr = appLayer.getDetails().get("editfeature.usernameAttribute").getValue();
String featureUsername = jsonFeature.optString(attr);
return featureUsername != null && featureUsername.equals(request.getRemoteUser());
}
return true;
}
/**
* Queries the datastore with a dummy update containing the username, used as an audit trail.
* The first string attribute of the type is set to the value {@code username = <username>}
* under a filter that can never match, so no feature is actually modified but the username
* still shows up in the database logs next to the following insert/update/delete statement.
*/
private void addAuditTrailLog() {
try{
List<AttributeDescriptor> attributeDescriptors = store.getSchema().getAttributeDescriptors();
String typeName = null;
for (AttributeDescriptor ad : attributeDescriptors) {
// Get an attribute of type string. This is because the username is almost always a string, and passing it to an Integer/Double will result in an invalid
// query which will not log the passed values (possibly because of the use of GeoTools).
if (ad.getType().getBinding() == String.class) {
typeName = ad.getLocalName();
break;
}
}
if (typeName == null) {
typeName = store.getSchema().getAttributeDescriptors().get(0).getLocalName();
log.warn("Audittrail: cannot find attribute of type double/integer or string. Take the first attribute.");
}
String username = context.getRequest().getRemoteUser();
String[] dummyValues = new String[]{"a", "b"}; // use these values for creating a statement which will always fail: attribute1 = a AND attribute1 = b.
String valueToInsert = "username = " + username;
store.modifyFeatures(typeName, valueToInsert, CQL.toFilter(typeName + " = '" + dummyValues[0] + "' and " + typeName + " = '" + dummyValues[1] + "'"));
} catch (Exception ex) {
// Swallow all exceptions, because this query inherently fails. Its only use is to log the application username,
// so it can be matched (via the database process id) to the following insert/update/delete statement.
}
}
}
| viewer/src/main/java/nl/b3p/viewer/stripes/EditFeatureActionBean.java | /*
* Copyright (C) 2012-2013 B3Partners B.V.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package nl.b3p.viewer.stripes;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.io.WKTReader;
import java.io.IOException;
import net.sourceforge.stripes.action.ActionBean;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.persistence.EntityManager;
import javax.servlet.http.HttpServletRequest;
import net.sourceforge.stripes.action.*;
import net.sourceforge.stripes.validation.Validate;
import nl.b3p.viewer.config.app.Application;
import nl.b3p.viewer.config.app.ApplicationLayer;
import nl.b3p.viewer.config.security.Authorizations;
import nl.b3p.viewer.config.services.FeatureTypeRelation;
import nl.b3p.viewer.config.services.Layer;
import nl.b3p.viewer.config.services.SimpleFeatureType;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.geotools.data.DataUtilities;
import org.geotools.data.DefaultTransaction;
import org.geotools.data.FeatureSource;
import org.geotools.data.Query;
import org.geotools.data.Transaction;
import org.geotools.data.simple.SimpleFeatureStore;
import org.geotools.factory.CommonFactoryFinder;
import org.geotools.filter.identity.FeatureIdImpl;
import org.geotools.filter.text.cql2.CQL;
import org.json.JSONException;
import org.json.JSONObject;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.type.AttributeDescriptor;
import org.opengis.feature.type.GeometryType;
import org.opengis.filter.Filter;
import org.opengis.filter.FilterFactory2;
import org.opengis.filter.identity.FeatureId;
import org.stripesstuff.stripersist.Stripersist;
/**
*
* @author Matthijs Laan
*/
@UrlBinding("/action/feature/edit")
@StrictBinding
public class EditFeatureActionBean implements ActionBean {
private static final Log log = LogFactory.getLog(EditFeatureActionBean.class);
private static final String FID = FeatureInfoActionBean.FID;
private ActionBeanContext context;
@Validate
private Application application;
@Validate
private String feature;
@Validate
private ApplicationLayer appLayer;
private Layer layer;
private SimpleFeatureStore store;
private JSONObject jsonFeature;
//<editor-fold defaultstate="collapsed" desc="getters and setters">
@Override
public ActionBeanContext getContext() {
return context;
}
@Override
public void setContext(ActionBeanContext context) {
this.context = context;
}
public Application getApplication() {
return application;
}
public void setApplication(Application application) {
this.application = application;
}
public String getFeature() {
return feature;
}
public void setFeature(String feature) {
this.feature = feature;
}
public ApplicationLayer getAppLayer() {
return appLayer;
}
public void setAppLayer(ApplicationLayer appLayer) {
this.appLayer = appLayer;
}
public SimpleFeatureStore getStore() {
return store;
}
public JSONObject getJsonFeature() {
return jsonFeature;
}
public Layer getLayer() {
return layer;
}
public String getFID() {
return FID;
}
//</editor-fold>
@DefaultHandler
public Resolution edit() throws JSONException {
JSONObject json = new JSONObject();
json.put("success", Boolean.FALSE);
String error = null;
FeatureSource fs = null;
EntityManager em = Stripersist.getEntityManager();
try {
do {
if(appLayer == null) {
error = "App layer or service not found";
break;
}
if(!Authorizations.isAppLayerWriteAuthorized(application, appLayer, context.getRequest(), em)) {
error = "U heeft geen rechten om deze kaartlaag te bewerken";
break;
}
layer = appLayer.getService().getLayer(appLayer.getLayerName(), em);
if(layer == null) {
error = "Layer not found";
break;
}
if(layer.getFeatureType() == null) {
error ="No feature type";
break;
}
fs = layer.getFeatureType().openGeoToolsFeatureSource();
if(!(fs instanceof SimpleFeatureStore)) {
error = "Feature source does not support editing";
break;
}
store = (SimpleFeatureStore)fs;
addAuditTrailLog();
jsonFeature = new JSONObject(feature);
if (!this.isFeatureWriteAuthorized(appLayer,jsonFeature,context.getRequest())){
error = "U heeft geen rechten om deze feature toe te voegen, te verwijderen en/of te wijzigen";
break;
}
String fid = jsonFeature.optString(FID, null);
if(fid == null) {
json.put(FID, addNewFeature());
} else {
editFeature(fid);
json.put(FID, fid);
}
json.put("success", Boolean.TRUE);
} while(false);
} catch(Exception e) {
log.error(String.format("Exception editing feature", e));
error = e.toString();
if(e.getCause() != null) {
error += "; cause: " + e.getCause().toString();
}
} finally {
if(fs != null) {
fs.getDataStore().dispose();
}
}
if(error != null) {
json.put("error", error);
log.error("Returned error message editing feature: " + error);
}
return new StreamingResolution("application/json", new StringReader(json.toString(4)));
}
public Resolution saveRelatedFeatures() throws JSONException {
JSONObject json = new JSONObject();
json.put("success", Boolean.FALSE);
String error = null;
FeatureSource fs = null;
EntityManager em = Stripersist.getEntityManager();
if (appLayer == null) {
error = "App layer or service not found";
}
if (!Authorizations.isAppLayerWriteAuthorized(application, appLayer, context.getRequest(), em)) {
error = "U heeft geen rechten om deze kaartlaag te bewerken";
}
layer = appLayer.getService().getLayer(appLayer.getLayerName(), em);
if (layer.getFeatureType().hasRelations()) {
String label;
for (FeatureTypeRelation rel : layer.getFeatureType().getRelations()) {
if (rel.getType().equals(FeatureTypeRelation.RELATE)) {
try {
SimpleFeatureType fType = rel.getForeignFeatureType();
label = fType.getDescription() == null ? fType.getTypeName() : fType.getDescription();
fs = fType.openGeoToolsFeatureSource(5000);
store = (SimpleFeatureStore) fs;
jsonFeature = new JSONObject(feature);
String fid = jsonFeature.optString(FID, null);
if (fid == null || fid.equals("")) {
json.put(FID, addNewFeature());
} else {
jsonFeature.remove("rel_id");
//editFeature(fid);
Transaction transaction = new DefaultTransaction("edit");
store.setTransaction(transaction);
FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2();
Filter filter = ff.id(new FeatureIdImpl(fid));
List<String> attributes = new ArrayList<String>();
List values = new ArrayList();
for (Iterator<String> it = jsonFeature.keys(); it.hasNext();) {
String attribute = it.next();
if (!FID.equals(attribute)) {
AttributeDescriptor ad = store.getSchema().getDescriptor(attribute);
if (ad != null) {
attributes.add(attribute);
//System.out.println(attribute);
String v = jsonFeature.getString(attribute);
//System.out.println(v);
values.add(StringUtils.defaultIfBlank(v, null));
}
}
}
log.debug(String.format("Modifying feature source #%d fid=%s, attributes=%s, values=%s",
layer.getFeatureType().getId(),
fid,
attributes.toString(),
values.toString()));
try {
store.modifyFeatures(attributes.toArray(new String[]{}), values.toArray(), filter);
transaction.commit();
} catch (Exception e) {
transaction.rollback();
throw e;
} finally {
transaction.close();
}
json.put(FID, fid);
}
json.put("success", Boolean.TRUE);
} catch (Exception ex) {
Logger.getLogger(EditFeatureActionBean.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
fs.getDataStore().dispose();
}
return new StreamingResolution("application/json", new StringReader(json.toString(4)));
}
public Resolution delete() throws JSONException {
JSONObject json = new JSONObject();
json.put("success", Boolean.FALSE);
String error = null;
FeatureSource fs = null;
EntityManager em = Stripersist.getEntityManager();
try {
do {
if(appLayer == null) {
error = "App layer or service not found";
break;
}
if(!Authorizations.isAppLayerWriteAuthorized(application, appLayer, context.getRequest(), em)) {
error = "U heeft geen rechten om deze kaartlaag te bewerken";
break;
}
layer = appLayer.getService().getLayer(appLayer.getLayerName(), em);
if(layer == null) {
error = "Layer not found";
break;
}
if (!Authorizations.isLayerGeomWriteAuthorized(layer, context.getRequest(), em)) {
error = "U heeft geen rechten om de geometrie van deze kaartlaag te bewerken";
break;
}
if(layer.getFeatureType() == null) {
error ="No feature type";
break;
}
fs = layer.getFeatureType().openGeoToolsFeatureSource();
if(!(fs instanceof SimpleFeatureStore)) {
error = "Feature source does not support editing";
break;
}
store = (SimpleFeatureStore)fs;
jsonFeature = new JSONObject(feature);
if (!this.isFeatureWriteAuthorized(appLayer,jsonFeature,context.getRequest())){
error = "U heeft geen rechten om deze feature toe te voegen en/of te wijzigen";
break;
}
String fid = jsonFeature.optString(FID, null);
if(fid == null) {
error = "Feature without FID can't be deleted";
break;
} else {
deleteFeature(fid);
}
json.put("success", Boolean.TRUE);
} while(false);
} catch(Exception e) {
log.error(String.format("Exception editing feature", e));
error = e.toString();
if(e.getCause() != null) {
error += "; cause: " + e.getCause().toString();
}
} finally {
if(fs != null) {
fs.getDataStore().dispose();
}
}
if(error != null) {
json.put("error", error);
log.error("Returned error message editing feature: " + error);
}
return new StreamingResolution("application/json", new StringReader(json.toString(4)));
}
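    /**
     * Builds a new feature from {@code jsonFeature} (geometry attributes are
     * parsed from WKT, blank values become null) and adds it to the store in
     * its own transaction.
     *
     * @return the FID of the newly created feature
     */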
protected String addNewFeature() throws Exception {
SimpleFeature f = DataUtilities.template(store.getSchema());
Transaction transaction = new DefaultTransaction("create");
store.setTransaction(transaction);
for(AttributeDescriptor ad: store.getSchema().getAttributeDescriptors()) {
if(ad.getType() instanceof GeometryType) {
String wkt = jsonFeature.optString(ad.getLocalName(), null);
Geometry g = null;
if(wkt != null) {
g = new WKTReader().read(wkt);
}
f.setDefaultGeometry(g);
} else {
String v = jsonFeature.optString(ad.getLocalName());
f.setAttribute(ad.getLocalName(), StringUtils.defaultIfBlank(v, null));
}
}
log.debug(String.format("Creating new feature in feature source source #%d: %s",
layer.getFeatureType().getId(),
f.toString()));
try {
List<FeatureId> ids = store.addFeatures(DataUtilities.collection(f));
transaction.commit();
return ids.get(0).getID();
} catch (Exception e) {
transaction.rollback();
throw e;
} finally {
transaction.close();
}
}
protected void deleteFeature(String fid) throws IOException, Exception {
Transaction transaction = new DefaultTransaction("edit");
store.setTransaction(transaction);
FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2();
Filter filter = ff.id(new FeatureIdImpl(fid));
try {
store.removeFeatures(filter);
transaction.commit();
} catch (Exception e) {
transaction.rollback();
throw e;
} finally {
transaction.close();
}
}
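    /**
     * Applies the attribute values from {@code jsonFeature} to the existing
     * feature identified by {@code fid}. Geometry attributes are parsed from
     * WKT; attributes unknown to the feature type or flagged as not
     * user-editable are skipped.
     */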
protected void editFeature(String fid) throws Exception {
Transaction transaction = new DefaultTransaction("edit");
store.setTransaction(transaction);
FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2();
Filter filter = ff.id(new FeatureIdImpl(fid));
List<String> attributes = new ArrayList<String>();
List values = new ArrayList();
for(Iterator<String> it = jsonFeature.keys(); it.hasNext();) {
String attribute = it.next();
if(!FID.equals(attribute)) {
AttributeDescriptor ad = store.getSchema().getDescriptor(attribute);
if (ad != null) {
if (!isAttributeUserEditingDisabled(attribute)) {
attributes.add(attribute);
if (ad.getType() instanceof GeometryType) {
String wkt = jsonFeature.getString(ad.getLocalName());
Geometry g = null;
if (wkt != null) {
g = new WKTReader().read(wkt);
}
values.add(g);
} else {
String v = jsonFeature.getString(attribute);
values.add(StringUtils.defaultIfBlank(v, null));
}
} else {
log.info(String.format("Attribute \"%s\" not user editable; ignoring", attribute));
}
} else {
log.warn(String.format("Attribute \"%s\" not in feature type; ignoring", attribute));
}
}
}
log.debug(String.format("Modifying feature source #%d fid=%s, attributes=%s, values=%s",
layer.getFeatureType().getId(),
fid,
attributes.toString(),
values.toString()));
try {
store.modifyFeatures(attributes.toArray(new String[] {}), values.toArray(), filter);
transaction.commit();
} catch (Exception e) {
transaction.rollback();
throw e;
} finally {
transaction.close();
}
}
/**
     * Check whether the {@code disableUserEdit} flag is set on the attribute.
*
* @param attrName attribute to check
* @return {@code true} when the configured attribute is flagged as
* "readOnly"
*/
protected boolean isAttributeUserEditingDisabled(String attrName) {
return this.getAppLayer().getAttribute(this.getLayer().getFeatureType(), attrName).isDisableUserEdit();
}
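    /**
     * Checks whether the current user may write this feature: when the app layer
     * has an "editfeature.usernameAttribute" detail configured, writing is only
     * allowed if that attribute of the feature equals the logged-in username;
     * without such a detail, writing is always allowed.
     */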
private boolean isFeatureWriteAuthorized(ApplicationLayer appLayer, JSONObject jsonFeature, HttpServletRequest request) {
if (appLayer.getDetails()!=null && appLayer.getDetails().containsKey("editfeature.usernameAttribute")){
String attr=appLayer.getDetails().get("editfeature.usernameAttribute").getValue();
String featureUsername=jsonFeature.optString(attr);
if (featureUsername!=null && featureUsername.equals(request.getRemoteUser())){
return true;
}else{
return false;
}
}
return true;
}
    /**
     * Issues a dummy, no-op query against the datastore that embeds the
     * application username, so that the username shows up in the database logs
     * as an audit trail. The first String attribute of the feature type is used
     * to compose an update of the form
     * {@code <firstattribute> = 'username = <username>'}, guarded by a filter
     * that can never match.
     */
private void addAuditTrailLog() {
try{
List<AttributeDescriptor> attributeDescriptors = store.getSchema().getAttributeDescriptors();
String typeName = null;
for (AttributeDescriptor ad : attributeDescriptors) {
                // Get an attribute of type String. This is because the username is almost always a string, and passing it to an Integer/Double
                // attribute will result in an invalid query which will not log the passed values (possibly because of the way GeoTools handles it).
if (ad.getType().getBinding() == String.class) {
typeName = ad.getLocalName();
break;
}
}
if (typeName == null) {
typeName = store.getSchema().getAttributeDescriptors().get(0).getLocalName();
log.warn("Audittrail: cannot find attribute of type double/integer or string. Take the first attribute.");
}
String username = context.getRequest().getRemoteUser();
            String[] dummyValues = new String[]{"a", "b"}; // used to build a filter that can never match: attribute1 = 'a' AND attribute1 = 'b'
String valueToInsert = "username = " + username;
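            // Illustration (hypothetical values): with typeName "name" and remote user
            // "alice", the call below is roughly equivalent to
            //   UPDATE <table> SET name = 'username = alice' WHERE name = 'a' AND name = 'b'
            // The WHERE clause is self-contradictory, so no row is changed, but the
            // statement (and thus the username) still shows up in the database log.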
store.modifyFeatures(typeName, valueToInsert, CQL.toFilter(typeName + " = '" + dummyValues[0] + "' and " + typeName + " = '" + dummyValues[1] + "'"));
} catch (Exception ex) {
            // Swallow all exceptions: this query inherently matches nothing. Its only use is to log the application username, so it can be matched (via the database process id)
            // to the following insert/update/delete statement.
}
}
}
| now using commons-logging instead of java logging
| viewer/src/main/java/nl/b3p/viewer/stripes/EditFeatureActionBean.java | now using commons-logging instead of java logging | <ide><path>iewer/src/main/java/nl/b3p/viewer/stripes/EditFeatureActionBean.java
<ide> import org.geotools.data.DataUtilities;
<ide> import org.geotools.data.DefaultTransaction;
<ide> import org.geotools.data.FeatureSource;
<del>import org.geotools.data.Query;
<ide> import org.geotools.data.Transaction;
<ide> import org.geotools.data.simple.SimpleFeatureStore;
<ide> import org.geotools.factory.CommonFactoryFinder;
<ide> }
<ide> json.put("success", Boolean.TRUE);
<ide> } catch (Exception ex) {
<del> Logger.getLogger(EditFeatureActionBean.class.getName()).log(Level.SEVERE, null, ex);
<add> log.error("cannot save relatedFeature Exception: ", ex);
<ide> }
<ide> }
<ide> |
|
Java | apache-2.0 | 8c92f7dc8346938e7c80d2bd10153e9a3e975124 | 0 | MIABIS/miabis-converter | package org.miabis.converter.cli;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.miabis.converter.batch.util.Util;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.repository.JobRestartException;
import org.springframework.context.support.AbstractApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
//@ContextConfiguration(locations={"/spring/batch/config/config.xml", "/spring/batch/jobs/job-csv-index.xml"})
public class ConverterCli {
private static Options options;
private static String clustersNodes = "localhost:9300";
private static String delimiter = Util.DELIMITER_TAB;
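	/*
	 * Example invocations (file names are hypothetical):
	 *   java -jar miabis-converter.jar -i Miabis.tab -c localhost:9300
	 *   java -jar miabis-converter.jar -t sample.csv biobank.csv collection.csv study.csv contact.csv -m map.properties
	 * A transform run writes its output to Miabis.tab in the working directory.
	 */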
public static void main(String[] args) throws ParseException, JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException, JobParametersInvalidException {
Option indexOpt = Option.builder("i")
.argName("input file(s)")
.longOpt("index")
.hasArgs()
.desc("indexes a set of files. If only one file is supplied it asumes is a MIABIS TAB file, else five files must be supplied (sample, biobank, saple collection, study, contact information). The list of files must be separated by a space.")
.build();
Option clustersNodesOpt = Option.builder("c")
.argName("elastic search cluster")
.longOpt("cluster")
.hasArg()
.desc("with -i: elastic search cluster group. It defaults to "+clustersNodes)
.build();
Option transformOpt = Option.builder("t")
.argName("input files")
.hasArgs()
.longOpt("transform")
.desc("transforms a set of files to MIABIS TAB. Five files must be supplied (sample, biobank, saple collection, study, contact information). The list of files must be separated by a space.")
.build();
Option delimiterOpt = Option.builder("d")
.argName("column delimiter")
.longOpt("delimiter")
.hasArg()
.desc("with -t: column delimiter. It defaults to TAB")
.build();
Option mapOpt = Option.builder("m")
.argName("map file")
.longOpt("map")
.hasArg()
.desc("with -t: miabis mapping file.")
.build();
Option helpOpt = Option.builder("h")
.longOpt("help")
.desc("print this message")
.build();
options = new Options();
options.addOption(indexOpt);
options.addOption(clustersNodesOpt);
options.addOption(helpOpt);
options.addOption(transformOpt);
options.addOption(delimiterOpt);
options.addOption(mapOpt);
CommandLineParser parser = new DefaultParser();
CommandLine cmd = null;
try{
cmd = parser.parse( options, args);
}catch(ParseException exp){
System.err.println( "Parsing failed. Reason: " + exp.getMessage() );
}
if(cmd == null || cmd.hasOption("h")){
printHelp();
return;
}
if(cmd.hasOption("i")){
clustersNodes = cmd.hasOption("c") ? cmd.getOptionValue('c') : clustersNodes;
String[] files = cmd.getOptionValues("i");
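			// One file: assume it is already MIABIS TAB and index it directly.
			// Five files: convert the source files to MIABIS first, then index.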
if(files.length == 1){
AbstractApplicationContext ctx = new ClassPathXmlApplicationContext(new String[] {"classpath*:**/config.xml", "classpath*:**/job-csv-index.xml"});
ctx.registerShutdownHook();
Job job = (Job) ctx.getBean("job1");
JobLauncher jobLauncher = (JobLauncher) ctx.getBean("jobLauncher");
JobParametersBuilder pb = new JobParametersBuilder();
pb.addString("tab.input", "file:" + files[0]);
pb.addString("clusters.nodes", clustersNodes);
pb.addString("columns", Util.COLUMNS);
jobLauncher.run(job, pb.toJobParameters());
}else if(files.length == 5){
if(!cmd.hasOption("m")){
System.out.println("No mapping file defined.");
return;
}
String map = cmd.getOptionValue("m");
delimiter = cmd.hasOption("d") ? cmd.getOptionValue('d') : delimiter;
AbstractApplicationContext ctx = new ClassPathXmlApplicationContext(new String[] {"classpath*:**/database.xml", "classpath*:**/job-csv-db-index.xml"});
ctx.registerShutdownHook();
JobParametersBuilder pb = new JobParametersBuilder();
pb.addString("sample", "file:" + files[0]);
pb.addString("biobank", "file:" + files[1]);
pb.addString("sampleCollection", "file:" + files[2]);
pb.addString("study", "file:" + files[3]);
pb.addString("contactInfo", "file:" + files[4]);
pb.addString("clusters.nodes", clustersNodes);
//Map
pb.addString("map", map);
JobLauncher jobLauncher = (JobLauncher) ctx.getBean("jobLauncher");
Job job = (Job) ctx.getBean("job1");
jobLauncher.run(job, pb.toJobParameters());
}else{
printHelp();
}
}else if(cmd.hasOption("t")){
if(!cmd.hasOption("m")){
System.out.println("No mapping file defined.");
return;
}
String map = cmd.getOptionValue("m");
delimiter = cmd.hasOption("d") ? cmd.getOptionValue('d') : delimiter;
String[] files = cmd.getOptionValues("t");
AbstractApplicationContext ctx = new ClassPathXmlApplicationContext(new String[] {"classpath*:**/database.xml", "classpath*:**/job-csv-db.xml"});
ctx.registerShutdownHook();
JobParametersBuilder pb = new JobParametersBuilder();
pb.addString("sample", "file:" + files[0]);
pb.addString("biobank", "file:" + files[1]);
pb.addString("sampleCollection", "file:" + files[2]);
pb.addString("study", "file:" + files[3]);
pb.addString("contactInfo", "file:" + files[4]);
//Map
pb.addString("map", map);
//Output file
pb.addString("tab.output", "file:Miabis.tab");
JobLauncher jobLauncher = (JobLauncher) ctx.getBean("jobLauncher");
Job job = (Job) ctx.getBean("job1");
jobLauncher.run(job, pb.toJobParameters());
}else{
printHelp();
}
}
private static void printHelp(){
new HelpFormatter().printHelp("miabis-converter", options);
}
}
| src/main/java/org/miabis/converter/cli/ConverterCli.java | package org.miabis.converter.cli;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.miabis.converter.batch.util.Util;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException;
import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.batch.core.repository.JobRestartException;
import org.springframework.context.support.AbstractApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
//@ContextConfiguration(locations={"/spring/batch/config/config.xml", "/spring/batch/jobs/job-csv-index.xml"})
public class ConverterCli {
private static Options options;
private static String clustersNodes = "localhost:9300";
private static String delimiter = Util.DELIMITER_TAB;
public static void main(String[] args) throws ParseException, JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException, JobParametersInvalidException {
Option indexOpt = Option.builder("i")
.argName("input file")
.longOpt("index")
.hasArg()
.desc("Index a file")
.build();
Option clustersNodesOpt = Option.builder("c")
.argName("elastic search cluster")
.longOpt("cluster")
.hasArg()
.desc("with -i: elastic search cluster group. It defaults to "+clustersNodes)
.build();
Option transformOpt = Option.builder("t")
.argName("input files")
.hasArgs()
.longOpt("transform")
.desc("transforms a set of files to MIABIS TAB. Five files must be supplied (sample, biobank, saple collection, study, contact information). The list of files must be separated by a space.")
.build();
Option delimiterOpt = Option.builder("d")
.argName("column delimiter")
.longOpt("delimiter")
.hasArg()
.desc("with -t: column delimiter. It defaults to TAB")
.build();
Option mapOpt = Option.builder("m")
.argName("map file")
.longOpt("map")
.hasArg()
.desc("with -t: miabis mapping file.")
.build();
Option helpOpt = Option.builder("h")
.longOpt("help")
.desc("print this message")
.build();
options = new Options();
options.addOption(indexOpt);
options.addOption(clustersNodesOpt);
options.addOption(helpOpt);
options.addOption(transformOpt);
options.addOption(delimiterOpt);
options.addOption(mapOpt);
CommandLineParser parser = new DefaultParser();
CommandLine cmd = null;
try{
cmd = parser.parse( options, args);
}catch(ParseException exp){
System.err.println( "Parsing failed. Reason: " + exp.getMessage() );
}
if(cmd == null || cmd.hasOption("h")){
printHelp();
return;
}
if(cmd.hasOption("i")){
clustersNodes = cmd.hasOption("c") ? cmd.getOptionValue('c') : clustersNodes;
String inputFile = cmd.getOptionValue("i");
AbstractApplicationContext ctx = new ClassPathXmlApplicationContext(new String[] {"classpath*:**/config.xml", "classpath*:**/job-csv-index.xml"});
ctx.registerShutdownHook();
Job job = (Job) ctx.getBean("job1");
JobLauncher jobLauncher = (JobLauncher) ctx.getBean("jobLauncher");
JobParametersBuilder pb = new JobParametersBuilder();
pb.addString("tab.input", "file:"+inputFile);
pb.addString("clusters.nodes", clustersNodes);
pb.addString("columns", Util.COLUMNS);
jobLauncher.run(job, pb.toJobParameters());
}else if(cmd.hasOption("t")){
if(!cmd.hasOption("m")){
System.out.println("No mapping file defined.");
return;
}
String map = cmd.getOptionValue("m");
delimiter = cmd.hasOption("d") ? cmd.getOptionValue('d') : delimiter;
String[] files = cmd.getOptionValues("t");
AbstractApplicationContext ctx = new ClassPathXmlApplicationContext(new String[] {"classpath*:**/database.xml", "classpath*:**/job-csv-db.xml"});
ctx.registerShutdownHook();
JobParametersBuilder pb = new JobParametersBuilder();
pb.addString("sample", files[0]);
pb.addString("biobank", files[1]);
pb.addString("sampleCollection", files[2]);
pb.addString("study", files[3]);
pb.addString("contactInfo", files[4]);
//Map
pb.addString("map", map);
//Output file
pb.addString("tab.output", "Miabis.tab");
JobLauncher jobLauncher = (JobLauncher) ctx.getBean("jobLauncher");
Job job = (Job) ctx.getBean("job1");
jobLauncher.run(job, pb.toJobParameters());
}else{
printHelp();
}
}
private static void printHelp(){
new HelpFormatter().printHelp("miabis-converter", options);
}
}
| added options
| src/main/java/org/miabis/converter/cli/ConverterCli.java | added options | <ide><path>rc/main/java/org/miabis/converter/cli/ConverterCli.java
<ide> public static void main(String[] args) throws ParseException, JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException, JobParametersInvalidException {
<ide>
<ide> Option indexOpt = Option.builder("i")
<del> .argName("input file")
<add> .argName("input file(s)")
<ide> .longOpt("index")
<del> .hasArg()
<del> .desc("Index a file")
<add> .hasArgs()
<add> .desc("indexes a set of files. If only one file is supplied it is assumed to be a MIABIS TAB file; otherwise five files must be supplied (sample, biobank, sample collection, study, contact information). The list of files must be separated by spaces.")
<ide> .build();
<ide>
<ide> Option clustersNodesOpt = Option.builder("c")
<ide>
<ide> clustersNodes = cmd.hasOption("c") ? cmd.getOptionValue('c') : clustersNodes;
<ide>
<del> String inputFile = cmd.getOptionValue("i");
<add> String[] files = cmd.getOptionValues("i");
<ide>
<del> AbstractApplicationContext ctx = new ClassPathXmlApplicationContext(new String[] {"classpath*:**/config.xml", "classpath*:**/job-csv-index.xml"});
<del> ctx.registerShutdownHook();
<add> if(files.length == 1){
<add> AbstractApplicationContext ctx = new ClassPathXmlApplicationContext(new String[] {"classpath*:**/config.xml", "classpath*:**/job-csv-index.xml"});
<add> ctx.registerShutdownHook();
<add>
<add> Job job = (Job) ctx.getBean("job1");
<add> JobLauncher jobLauncher = (JobLauncher) ctx.getBean("jobLauncher");
<add>
<add> JobParametersBuilder pb = new JobParametersBuilder();
<add> pb.addString("tab.input", "file:" + files[0]);
<add> pb.addString("clusters.nodes", clustersNodes);
<add> pb.addString("columns", Util.COLUMNS);
<add>
<add> jobLauncher.run(job, pb.toJobParameters());
<add> }else if(files.length == 5){
<add>
<add> if(!cmd.hasOption("m")){
<add> System.out.println("No mapping file defined.");
<add> return;
<add> }
<add>
<add> String map = cmd.getOptionValue("m");
<add> delimiter = cmd.hasOption("d") ? cmd.getOptionValue('d') : delimiter;
<add>
<add> AbstractApplicationContext ctx = new ClassPathXmlApplicationContext(new String[] {"classpath*:**/database.xml", "classpath*:**/job-csv-db-index.xml"});
<add> ctx.registerShutdownHook();
<add>
<add> JobParametersBuilder pb = new JobParametersBuilder();
<add> pb.addString("sample", "file:" + files[0]);
<add> pb.addString("biobank", "file:" + files[1]);
<add> pb.addString("sampleCollection", "file:" + files[2]);
<add> pb.addString("study", "file:" + files[3]);
<add> pb.addString("contactInfo", "file:" + files[4]);
<add>
<add> pb.addString("clusters.nodes", clustersNodes);
<add>
<add> //Map
<add> pb.addString("map", map);
<add>
<add> JobLauncher jobLauncher = (JobLauncher) ctx.getBean("jobLauncher");
<add> Job job = (Job) ctx.getBean("job1");
<add>
<add> jobLauncher.run(job, pb.toJobParameters());
<add> }else{
<add> printHelp();
<add> }
<ide>
<del> Job job = (Job) ctx.getBean("job1");
<del> JobLauncher jobLauncher = (JobLauncher) ctx.getBean("jobLauncher");
<ide>
<del> JobParametersBuilder pb = new JobParametersBuilder();
<del> pb.addString("tab.input", "file:"+inputFile);
<del> pb.addString("clusters.nodes", clustersNodes);
<del> pb.addString("columns", Util.COLUMNS);
<del>
<del> jobLauncher.run(job, pb.toJobParameters());
<ide>
<ide> }else if(cmd.hasOption("t")){
<ide>
<ide> ctx.registerShutdownHook();
<ide>
<ide> JobParametersBuilder pb = new JobParametersBuilder();
<del> pb.addString("sample", files[0]);
<del> pb.addString("biobank", files[1]);
<del> pb.addString("sampleCollection", files[2]);
<del> pb.addString("study", files[3]);
<del> pb.addString("contactInfo", files[4]);
<add> pb.addString("sample", "file:" + files[0]);
<add> pb.addString("biobank", "file:" + files[1]);
<add> pb.addString("sampleCollection", "file:" + files[2]);
<add> pb.addString("study", "file:" + files[3]);
<add> pb.addString("contactInfo", "file:" + files[4]);
<ide>
<ide> //Map
<ide> pb.addString("map", map);
<ide>
<ide> //Output file
<del> pb.addString("tab.output", "Miabis.tab");
<add> pb.addString("tab.output", "file:Miabis.tab");
<ide>
<ide> JobLauncher jobLauncher = (JobLauncher) ctx.getBean("jobLauncher");
<ide> Job job = (Job) ctx.getBean("job1"); |
|
Java | apache-2.0 | de8f3592ee716351b1694f48b797c89e2837caa7 | 0 | superbstreak/drill,johnnywale/drill,bbevens/drill,hsuanyi/incubator-drill,mapr/incubator-drill,vvysotskyi/drill,Ben-Zvi/drill,Serhii-Harnyk/drill,dsbos/incubator-drill,vkorukanti/drill,ketfos/testdrill,amithadke/drill,akumarb2010/incubator-drill,mapr-demos/drill-pcap-format,KulykRoman/drill,sohami/drill,sudheeshkatkam/drill,pwong-mapr/incubator-drill,cocosli/drill,StevenMPhillips/drill,tgrall/drill,bbevens/drill,norrislee/incubator-drill,adityakishore/drill,caijieming-baidu/drill,sindhurirayavaram/drill,jaltekruse/incubator-drill,ppadma/drill,cchang738/drill,hsuanyi/incubator-drill,arina-ielchiieva/drill,rchallapalli/drill,mehant/drill,adeneche/incubator-drill,mehant/drill,caijieming-baidu/drill,activitystream/drill,julianhyde/drill,hnfgns/incubator-drill,bbevens/drill,johnnywale/drill,akumarb2010/incubator-drill,xiaom/drill,vvysotskyi/drill,paul-rogers/drill,apache/drill,norrislee/incubator-drill,jhsbeat/drill,homosepian/drill,sudheeshkatkam/drill,squidsolutions/drill,vdiravka/drill,jaltekruse/incubator-drill,squidsolutions/drill,cchang738/drill,nagix/drill,Ben-Zvi/drill,kristinehahn/drill,KulykRoman/drill,adeneche/incubator-drill,jackyxhb/drill,AdamPD/drill,arina-ielchiieva/drill,maryannxue/drill,Serhii-Harnyk/drill,dsbos/incubator-drill,ebegoli/drill,adityakishore/drill,sudheeshkatkam/drill,xiaom/drill,kkhatua/drill,yssharma/pig-on-drill,zzy6395/drill,arina-ielchiieva/drill,adityakishore/drill,arina-ielchiieva/drill,StevenMPhillips/drill,yssharma/drill,norrislee/incubator-drill,cocosli/drill,jinfengni/incubator-drill,parthchandra/drill,sudheeshkatkam/drill,yufeldman/incubator-drill,dsbos/incubator-drill,mapr-demos/drill-pcap-format,puneetjaiswal/drill,ppadma/drill,AdamPD/drill,arina-ielchiieva/drill,vdiravka/drill,vdiravka/drill,homosepian/drill,ppadma/drill,kristinehahn/drill,jdownton/drill,johanwitters/drill,kkhatua/drill,jaltekruse/incubator-drill,jaltekruse/incubator-drill,cocosli/drill,norrislee/incubator-drill,kkhatua/drill,ketfos/testdrill,cchang738/drill,superbstreak/drill,adeneche/incubator-drill,ssriniva123/drill,rchallapalli/drill,hnfgns/incubator-drill,maryannxue/drill,yssharma/drill,yssharma/drill,abhipol/drill,julianhyde/drill,bbevens/drill,maryannxue/drill,jinfengni/incubator-drill,jacques-n/drill,tshiran/drill,parthchandra/incubator-drill,jhsbeat/drill,parthchandra/incubator-drill,mapr/incubator-drill,pwong-mapr/incubator-drill,yssharma/pig-on-drill,Agirish/drill,abhipol/drill,hsuanyi/incubator-drill,activitystream/drill,mapr/incubator-drill,jdownton/drill,sindhurirayavaram/drill,santoshsahoo/drill,hsuanyi/incubator-drill,jaltekruse/incubator-drill,kingmesal/drill,zzy6395/drill,AdamPD/drill,superbstreak/drill,jackyxhb/drill,xiaom/drill,squidsolutions/drill,xiaom/drill,ssriniva123/drill,julienledem/drill,activitystream/drill,amithadke/drill,StevenMPhillips/drill,kristinehahn/drill,zzy6395/drill,homosepian/drill,nagix/drill,nagix/drill,sohami/drill,julienledem/drill,superbstreak/drill,vdiravka/drill,yufeldman/incubator-drill,bitblender/drill,cwestin/incubator-drill,Agirish/drill,bbevens/drill,mapr/incubator-drill,squidsolutions/drill,cocosli/drill,Agirish/drill,ssriniva123/drill,ketfos/testdrill,vkorukanti/drill,vvysotskyi/drill,jackyxhb/drill,weijietong/drill,tshiran/drill,yssharma/drill,parthchandra/drill,pwong-mapr/incubator-drill,parthchandra/incubator-drill,mehant/drill,paul-rogers/drill,kingmesal/drill,sohami/drill,jacques-n/drill,ketfos/testdrill,mapr-demos/drill-pcap-format,amithadke/drill,ssriniva12
3/drill,abhipol/drill,xiaom/drill,ssriniva123/drill,mapr-demos/drill-pcap-format,vkorukanti/drill,vdiravka/drill,julienledem/drill,yssharma/pig-on-drill,activitystream/drill,johnnywale/drill,cchang738/drill,KulykRoman/drill,caijieming-baidu/drill,sohami/drill,jinfengni/incubator-drill,santoshsahoo/drill,johnnywale/drill,johanwitters/drill,homosepian/drill,kingmesal/drill,jaltekruse/incubator-drill,tgrall/drill,cwestin/incubator-drill,parthchandra/incubator-drill,bitblender/drill,johanwitters/drill,parthchandra/drill,sohami/drill,santoshsahoo/drill,adeneche/incubator-drill,jhsbeat/drill,mapr-demos/drill-pcap-format,tgrall/drill,apache/drill,StevenMPhillips/drill,kingmesal/drill,yssharma/pig-on-drill,jacques-n/drill,apache/drill,vkorukanti/drill,jackyxhb/drill,cocosli/drill,vkorukanti/drill,KulykRoman/drill,cwestin/incubator-drill,adityakishore/drill,julianhyde/drill,jinfengni/incubator-drill,myroch/drill,julianhyde/drill,zzy6395/drill,dsbos/incubator-drill,sudheeshkatkam/drill,puneetjaiswal/drill,tshiran/drill,parthchandra/drill,yufeldman/incubator-drill,StevenMPhillips/drill,squidsolutions/drill,puneetjaiswal/drill,jhsbeat/drill,mapr-demos/drill-pcap-format,activitystream/drill,myroch/drill,puneetjaiswal/drill,jacques-n/drill,tgrall/drill,weijietong/drill,Ben-Zvi/drill,akumarb2010/incubator-drill,rchallapalli/drill,superbstreak/drill,yssharma/pig-on-drill,yufeldman/incubator-drill,KulykRoman/drill,sindhurirayavaram/drill,AdamPD/drill,paul-rogers/drill,johanwitters/drill,vvysotskyi/drill,vvysotskyi/drill,sindhurirayavaram/drill,bitblender/drill,maryannxue/drill,adityakishore/drill,kristinehahn/drill,Agirish/drill,kkhatua/drill,julienledem/drill,tshiran/drill,maryannxue/drill,hnfgns/incubator-drill,parthchandra/drill,apache/drill,ebegoli/drill,Ben-Zvi/drill,jdownton/drill,apache/drill,mehant/drill,kingmesal/drill,amithadke/drill,Ben-Zvi/drill,arina-ielchiieva/drill,johnnywale/drill,ppadma/drill,kristinehahn/drill,adeneche/incubator-drill,superbstreak/drill,weijietong/drill,mapr/incubator-drill,tgrall/drill,yssharma/pig-on-drill,nagix/drill,AdamPD/drill,ppadma/drill,Serhii-Harnyk/drill,paul-rogers/drill,abhipol/drill,KulykRoman/drill,pwong-mapr/incubator-drill,akumarb2010/incubator-drill,paul-rogers/drill,yufeldman/incubator-drill,myroch/drill,amithadke/drill,hsuanyi/incubator-drill,mapr/incubator-drill,myroch/drill,cchang738/drill,sindhurirayavaram/drill,yssharma/drill,puneetjaiswal/drill,johanwitters/drill,kkhatua/drill,nagix/drill,jhsbeat/drill,akumarb2010/incubator-drill,caijieming-baidu/drill,dsbos/incubator-drill,xiaom/drill,Agirish/drill,zzy6395/drill,santoshsahoo/drill,weijietong/drill,sohami/drill,Serhii-Harnyk/drill,rchallapalli/drill,apache/drill,ebegoli/drill,ebegoli/drill,ketfos/testdrill,Ben-Zvi/drill,bitblender/drill,bitblender/drill,nagix/drill,Serhii-Harnyk/drill,julienledem/drill,tshiran/drill,abhipol/drill,Agirish/drill,kkhatua/drill,caijieming-baidu/drill,rchallapalli/drill,jacques-n/drill,jinfengni/incubator-drill,parthchandra/incubator-drill,myroch/drill,cwestin/incubator-drill,weijietong/drill,homosepian/drill,cwestin/incubator-drill,paul-rogers/drill,ppadma/drill,cchang738/drill,jackyxhb/drill,santoshsahoo/drill,mehant/drill,tshiran/drill,norrislee/incubator-drill,hnfgns/incubator-drill,julianhyde/drill,jdownton/drill,pwong-mapr/incubator-drill,parthchandra/drill,pwong-mapr/incubator-drill,hnfgns/incubator-drill,jdownton/drill,vvysotskyi/drill,ebegoli/drill | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.planner.logical;
import java.math.BigDecimal;
import java.util.GregorianCalendar;
import java.util.List;
import org.apache.drill.common.expression.ExpressionPosition;
import org.apache.drill.common.expression.FieldReference;
import org.apache.drill.common.expression.FunctionCallFactory;
import org.apache.drill.common.expression.IfExpression;
import org.apache.drill.common.expression.IfExpression.IfCondition;
import org.apache.drill.common.expression.LogicalExpression;
import org.apache.drill.common.expression.NullExpression;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.common.expression.TypedNullConstant;
import org.apache.drill.common.expression.ValueExpressions;
import org.apache.drill.common.expression.ValueExpressions.QuotedString;
import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.common.types.Types;
import org.eigenbase.rel.RelNode;
import org.eigenbase.reltype.RelDataTypeField;
import org.eigenbase.rex.RexCall;
import org.eigenbase.rex.RexCorrelVariable;
import org.eigenbase.rex.RexDynamicParam;
import org.eigenbase.rex.RexFieldAccess;
import org.eigenbase.rex.RexInputRef;
import org.eigenbase.rex.RexLiteral;
import org.eigenbase.rex.RexLocalRef;
import org.eigenbase.rex.RexNode;
import org.eigenbase.rex.RexOver;
import org.eigenbase.rex.RexRangeRef;
import org.eigenbase.rex.RexVisitorImpl;
import org.eigenbase.sql.SqlSyntax;
import org.eigenbase.sql.fun.SqlStdOperatorTable;
import org.eigenbase.util.NlsString;
import com.google.common.collect.Lists;
/**
* Utilities for Drill's planner.
*/
public class DrillOptiq {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillOptiq.class);
/**
* Converts a tree of {@link RexNode} operators into a scalar expression in Drill syntax.
*/
public static LogicalExpression toDrill(DrillParseContext context, RelNode input, RexNode expr) {
final RexToDrill visitor = new RexToDrill(context, input);
return expr.accept(visitor);
}
private static class RexToDrill extends RexVisitorImpl<LogicalExpression> {
private final RelNode input;
private final DrillParseContext context;
RexToDrill(DrillParseContext context, RelNode input) {
super(true);
this.context = context;
this.input = input;
}
@Override
public LogicalExpression visitInputRef(RexInputRef inputRef) {
final int index = inputRef.getIndex();
final RelDataTypeField field = input.getRowType().getFieldList().get(index);
return FieldReference.getWithQuotedRef(field.getName());
}
@Override
public LogicalExpression visitCall(RexCall call) {
logger.debug("RexCall {}, {}", call);
final SqlSyntax syntax = call.getOperator().getSyntax();
switch (syntax) {
case BINARY:
logger.debug("Binary");
final String funcName = call.getOperator().getName().toLowerCase();
List<LogicalExpression> args = Lists.newArrayList();
for(RexNode r : call.getOperands()){
args.add(r.accept(this));
}
if (FunctionCallFactory.isBooleanOperator(funcName)) {
LogicalExpression func = FunctionCallFactory.createBooleanOperator(funcName, args);
return func;
} else {
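          // Fold the n-ary operand list into a right-deep tree of binary calls,
          // e.g. a + b + c becomes add(a, add(b, c)).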
args = Lists.reverse(args);
LogicalExpression lastArg = args.get(0);
for(int i = 1; i < args.size(); i++){
lastArg = FunctionCallFactory.createExpression(funcName, Lists.newArrayList(args.get(i), lastArg));
}
return lastArg;
}
case FUNCTION:
case FUNCTION_ID:
logger.debug("Function");
return getDrillFunctionFromOptiqCall(call);
case POSTFIX:
logger.debug("Postfix");
switch(call.getKind()){
case IS_NULL:
case IS_TRUE:
case IS_FALSE:
case OTHER:
return FunctionCallFactory.createExpression(call.getOperator().getName().toLowerCase(),
ExpressionPosition.UNKNOWN, call.getOperands().get(0).accept(this));
}
throw new AssertionError("todo: implement syntax " + syntax + "(" + call + ")");
case PREFIX:
logger.debug("Prefix");
LogicalExpression arg = call.getOperands().get(0).accept(this);
switch(call.getKind()){
case NOT:
return FunctionCallFactory.createExpression(call.getOperator().getName().toLowerCase(),
ExpressionPosition.UNKNOWN, arg);
}
throw new AssertionError("todo: implement syntax " + syntax + "(" + call + ")");
case SPECIAL:
logger.debug("Special");
switch(call.getKind()){
case CAST:
return getDrillCastFunctionFromOptiq(call);
case LIKE:
case SIMILAR:
return getDrillFunctionFromOptiqCall(call);
case CASE:
List<LogicalExpression> caseArgs = Lists.newArrayList();
for(RexNode r : call.getOperands()){
caseArgs.add(r.accept(this));
}
caseArgs = Lists.reverse(caseArgs);
              // the number of arguments is always going to be odd, because
              // Optiq adds "null" for the missing else expression at the end
assert caseArgs.size()%2 == 1;
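              // After reversing, caseArgs is [else, vN, cN, ..., v1, c1]; build the
              // IfExpression from the inside out, so that
              //   CASE WHEN c1 THEN v1 WHEN c2 THEN v2 ELSE e END
              // becomes if(c1) v1 else (if(c2) v2 else e).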
LogicalExpression elseExpression = caseArgs.get(0);
for (int i=1; i<caseArgs.size(); i=i+2) {
elseExpression = IfExpression.newBuilder()
.setElse(elseExpression)
.addCondition(new IfCondition(caseArgs.get(i + 1), caseArgs.get(i))).build();
}
return elseExpression;
}
if (call.getOperator() == SqlStdOperatorTable.ITEM) {
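          // ITEM is array/map access, e.g. a[4] or a['key']; translate it into a
          // child reference on the SchemaPath.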
SchemaPath left = (SchemaPath) call.getOperands().get(0).accept(this);
final RexLiteral literal = (RexLiteral) call.getOperands().get(1);
switch(literal.getTypeName()){
case DECIMAL:
case INTEGER:
return left.getChild(((BigDecimal)literal.getValue()).intValue());
case CHAR:
return left.getChild(literal.getValue2().toString());
default:
// fall through
}
}
// fall through
default:
throw new AssertionError("todo: implement syntax " + syntax + "(" + call + ")");
}
}
private LogicalExpression doUnknown(Object o){
logger.warn("Doesn't currently support consumption of {}.", o);
return NullExpression.INSTANCE;
}
@Override
public LogicalExpression visitLocalRef(RexLocalRef localRef) {
return doUnknown(localRef);
}
@Override
public LogicalExpression visitOver(RexOver over) {
return doUnknown(over);
}
@Override
public LogicalExpression visitCorrelVariable(RexCorrelVariable correlVariable) {
return doUnknown(correlVariable);
}
@Override
public LogicalExpression visitDynamicParam(RexDynamicParam dynamicParam) {
return doUnknown(dynamicParam);
}
@Override
public LogicalExpression visitRangeRef(RexRangeRef rangeRef) {
return doUnknown(rangeRef);
}
@Override
public LogicalExpression visitFieldAccess(RexFieldAccess fieldAccess) {
return super.visitFieldAccess(fieldAccess);
}
private LogicalExpression getDrillCastFunctionFromOptiq(RexCall call){
LogicalExpression arg = call.getOperands().get(0).accept(this);
MajorType castType = null;
switch(call.getType().getSqlTypeName().getName()){
case "VARCHAR":
case "CHAR":
castType = Types.required(MinorType.VARCHAR).toBuilder().setWidth(call.getType().getPrecision()).build();
break;
case "INTEGER": castType = Types.required(MinorType.INT); break;
case "FLOAT": castType = Types.required(MinorType.FLOAT4); break;
case "DOUBLE": castType = Types.required(MinorType.FLOAT8); break;
case "DECIMAL":
int precision = call.getType().getPrecision();
int scale = call.getType().getScale();
if (precision <= 9) {
castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL9).setPrecision(precision).setScale(scale).build();
} else if (precision <= 18) {
castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL18).setPrecision(precision).setScale(scale).build();
} else if (precision <= 28) {
// Inject a cast to SPARSE before casting to the dense type.
castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL28SPARSE).setPrecision(precision).setScale(scale).build();
} else if (precision <= 38) {
castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL38SPARSE).setPrecision(precision).setScale(scale).build();
} else {
throw new UnsupportedOperationException("Only Decimal types with precision range 0 - 38 is supported");
}
break;
case "INTERVAL_YEAR_MONTH": castType = Types.required(MinorType.INTERVALYEAR); break;
case "INTERVAL_DAY_TIME": castType = Types.required(MinorType.INTERVALDAY); break;
case "ANY": return arg; // Type will be same as argument.
default: castType = Types.required(MinorType.valueOf(call.getType().getSqlTypeName().getName()));
}
return FunctionCallFactory.createCast(castType, ExpressionPosition.UNKNOWN, arg);
}
private LogicalExpression getDrillFunctionFromOptiqCall(RexCall call) {
List<LogicalExpression> args = Lists.newArrayList();
for(RexNode n : call.getOperands()){
args.add(n.accept(this));
}
String functionName = call.getOperator().getName().toLowerCase();
      // TODO: once we have more function rewrites and a pattern emerges from different rewrites, factor this out in a better fashion
/* Rewrite extract functions in the following manner
* extract(year, date '2008-2-23') ---> extractYear(date '2008-2-23')
*/
if (functionName.equals("extract")) {
// Assert that the first argument to extract is a QuotedString
assert args.get(0) instanceof ValueExpressions.QuotedString;
// Get the unit of time to be extracted
String timeUnitStr = ((ValueExpressions.QuotedString)args.get(0)).value;
switch (timeUnitStr){
case ("YEAR"):
case ("MONTH"):
case ("DAY"):
case ("HOUR"):
case ("MINUTE"):
case ("SECOND"):
String functionPostfix = timeUnitStr.substring(0, 1).toUpperCase() + timeUnitStr.substring(1).toLowerCase();
functionName += functionPostfix;
return FunctionCallFactory.createExpression(functionName, args.subList(1, 2));
default:
throw new UnsupportedOperationException("extract function supports the following time units: YEAR, MONTH, DAY, HOUR, MINUTE, SECOND");
}
} else if (functionName.equals("trim")) {
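        // Rewrite SQL TRIM(LEADING|TRAILING|BOTH ch FROM str) as ltrim/rtrim/btrim.
        // Note the operand order is swapped: the string comes first, the trim
        // characters second.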
String trimFunc = null;
List<LogicalExpression> trimArgs = Lists.newArrayList();
assert args.get(0) instanceof ValueExpressions.QuotedString;
switch (((ValueExpressions.QuotedString)args.get(0)).value.toUpperCase()) {
case "LEADING":
trimFunc = "ltrim";
break;
case "TRAILING":
trimFunc = "rtrim";
break;
case "BOTH":
trimFunc = "btrim";
break;
default:
            assert false : "Unsupported trim keyword";
}
trimArgs.add(args.get(2));
trimArgs.add(args.get(1));
return FunctionCallFactory.createExpression(trimFunc, trimArgs);
} else if (functionName.equals("ltrim") || functionName.equals("rtrim") || functionName.equals("btrim")) {
if (args.size() == 1) {
args.add(ValueExpressions.getChar(" "));
}
return FunctionCallFactory.createExpression(functionName, args);
} else if (functionName.equals("date_part")) {
// Rewrite DATE_PART functions as extract functions
// assert that the function has exactly two arguments
assert args.size() == 2;
/* Based on the first input to the date_part function we rewrite the function as the
* appropriate extract function. For example
* date_part('year', date '2008-2-23') ------> extractYear(date '2008-2-23')
*/
assert args.get(0) instanceof QuotedString;
QuotedString extractString = (QuotedString) args.get(0);
String functionPostfix = extractString.value.substring(0, 1).toUpperCase() + extractString.value.substring(1).toLowerCase();
return FunctionCallFactory.createExpression("extract" + functionPostfix, args.subList(1, 2));
} else if (functionName.equals("concat")) {
        // Fold the variadic concat into nested binary calls: concat(a, b, c) becomes concat(concat(a, b), c)
List<LogicalExpression> concatArgs = Lists.newArrayList();
concatArgs.add(args.get(0));
concatArgs.add(args.get(1));
LogicalExpression first = FunctionCallFactory.createExpression(functionName, concatArgs);
for (int i = 2; i < args.size(); i++) {
concatArgs = Lists.newArrayList();
concatArgs.add(first);
concatArgs.add(args.get(i));
first = FunctionCallFactory.createExpression(functionName, concatArgs);
}
return first;
} else if (functionName.equals("length")) {
if (args.size() == 2) {
// Second argument should always be a literal specifying the encoding format
assert args.get(1) instanceof ValueExpressions.QuotedString;
String encodingType = ((ValueExpressions.QuotedString) args.get(1)).value;
functionName += encodingType.substring(0, 1).toUpperCase() + encodingType.substring(1).toLowerCase();
return FunctionCallFactory.createExpression(functionName, args.subList(0, 1));
}
} else if ((functionName.equals("convert_from") || functionName.equals("convert_to"))
&& args.get(1) instanceof QuotedString) {
return FunctionCallFactory.createConvert(functionName, ((QuotedString)args.get(1)).value, args.get(0), ExpressionPosition.UNKNOWN);
} else if ((functionName.equalsIgnoreCase("rpad")) || functionName.equalsIgnoreCase("lpad")) {
// If we have only two arguments for rpad/lpad append a default QuotedExpression as an argument which will be used to pad the string
if (args.size() == 2) {
String spaceFill = " ";
LogicalExpression fill = ValueExpressions.getChar(spaceFill);
args.add(fill);
}
}
return FunctionCallFactory.createExpression(functionName, args);
}
@Override
public LogicalExpression visitLiteral(RexLiteral literal) {
switch(literal.getType().getSqlTypeName()){
case BIGINT:
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.BIGINT);
}
long l = ((BigDecimal) literal.getValue()).longValue();
return ValueExpressions.getBigInt(l);
case BOOLEAN:
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.BIT);
}
return ValueExpressions.getBit(((Boolean) literal.getValue()));
case CHAR:
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.VARCHAR);
}
return ValueExpressions.getChar(((NlsString)literal.getValue()).getValue());
case DOUBLE:
if (isLiteralNull(literal)){
return createNullExpr(MinorType.FLOAT8);
}
double d = ((BigDecimal) literal.getValue()).doubleValue();
return ValueExpressions.getFloat8(d);
case FLOAT:
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.FLOAT4);
}
float f = ((BigDecimal) literal.getValue()).floatValue();
return ValueExpressions.getFloat4(f);
case INTEGER:
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.INT);
}
int a = ((BigDecimal) literal.getValue()).intValue();
return ValueExpressions.getInt(a);
case DECIMAL:
/* TODO: Enable using Decimal literals once we have more functions implemented for Decimal
* For now continue using Double instead of decimals
int precision = ((BigDecimal) literal.getValue()).precision();
if (precision <= 9) {
return ValueExpressions.getDecimal9((BigDecimal)literal.getValue());
} else if (precision <= 18) {
return ValueExpressions.getDecimal18((BigDecimal)literal.getValue());
} else if (precision <= 28) {
return ValueExpressions.getDecimal28((BigDecimal)literal.getValue());
} else if (precision <= 38) {
return ValueExpressions.getDecimal38((BigDecimal)literal.getValue());
} */
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.FLOAT8);
}
double dbl = ((BigDecimal) literal.getValue()).doubleValue();
logger.warn("Converting exact decimal into approximate decimal. Should be fixed once decimal is implemented.");
return ValueExpressions.getFloat8(dbl);
case VARCHAR:
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.VARCHAR);
}
return ValueExpressions.getChar(((NlsString)literal.getValue()).getValue());
case SYMBOL:
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.VARCHAR);
}
return ValueExpressions.getChar(literal.getValue().toString());
case DATE:
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.DATE);
}
return (ValueExpressions.getDate((GregorianCalendar)literal.getValue()));
case TIME:
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.TIME);
}
return (ValueExpressions.getTime((GregorianCalendar)literal.getValue()));
case TIMESTAMP:
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.TIMESTAMP);
}
return (ValueExpressions.getTimeStamp((GregorianCalendar) literal.getValue()));
case INTERVAL_YEAR_MONTH:
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.INTERVALYEAR);
}
return (ValueExpressions.getIntervalYear(((BigDecimal) (literal.getValue())).intValue()));
case INTERVAL_DAY_TIME:
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.INTERVALDAY);
}
return (ValueExpressions.getIntervalDay(((BigDecimal) (literal.getValue())).longValue()));
case NULL:
return NullExpression.INSTANCE;
case ANY:
if (isLiteralNull(literal)) {
return NullExpression.INSTANCE;
}
default:
throw new UnsupportedOperationException(String.format("Unable to convert the value of %s and type %s to a Drill constant expression.", literal, literal.getType().getSqlTypeName()));
}
}
}
private static final TypedNullConstant createNullExpr(MinorType type) {
return new TypedNullConstant(Types.optional(type));
}
private static boolean isLiteralNull(RexLiteral literal) {
return literal.getTypeName().getName().equals("NULL");
}
}
| exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.planner.logical;
import java.math.BigDecimal;
import java.util.GregorianCalendar;
import java.util.List;
import org.apache.drill.common.expression.ExpressionPosition;
import org.apache.drill.common.expression.FieldReference;
import org.apache.drill.common.expression.FunctionCallFactory;
import org.apache.drill.common.expression.IfExpression;
import org.apache.drill.common.expression.IfExpression.IfCondition;
import org.apache.drill.common.expression.LogicalExpression;
import org.apache.drill.common.expression.NullExpression;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.common.expression.TypedNullConstant;
import org.apache.drill.common.expression.ValueExpressions;
import org.apache.drill.common.expression.ValueExpressions.QuotedString;
import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.common.types.Types;
import org.eigenbase.rel.RelNode;
import org.eigenbase.reltype.RelDataTypeField;
import org.eigenbase.rex.RexCall;
import org.eigenbase.rex.RexCorrelVariable;
import org.eigenbase.rex.RexDynamicParam;
import org.eigenbase.rex.RexFieldAccess;
import org.eigenbase.rex.RexInputRef;
import org.eigenbase.rex.RexLiteral;
import org.eigenbase.rex.RexLocalRef;
import org.eigenbase.rex.RexNode;
import org.eigenbase.rex.RexOver;
import org.eigenbase.rex.RexRangeRef;
import org.eigenbase.rex.RexVisitorImpl;
import org.eigenbase.sql.SqlSyntax;
import org.eigenbase.sql.fun.SqlStdOperatorTable;
import org.eigenbase.util.NlsString;
import com.google.common.collect.Lists;
/**
* Utilities for Drill's planner.
*/
public class DrillOptiq {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillOptiq.class);
/**
* Converts a tree of {@link RexNode} operators into a scalar expression in Drill syntax.
*/
public static LogicalExpression toDrill(DrillParseContext context, RelNode input, RexNode expr) {
final RexToDrill visitor = new RexToDrill(context, input);
return expr.accept(visitor);
}
private static class RexToDrill extends RexVisitorImpl<LogicalExpression> {
private final RelNode input;
private final DrillParseContext context;
RexToDrill(DrillParseContext context, RelNode input) {
super(true);
this.context = context;
this.input = input;
}
@Override
public LogicalExpression visitInputRef(RexInputRef inputRef) {
final int index = inputRef.getIndex();
final RelDataTypeField field = input.getRowType().getFieldList().get(index);
return FieldReference.getWithQuotedRef(field.getName());
}
@Override
public LogicalExpression visitCall(RexCall call) {
logger.debug("RexCall {}, {}", call);
final SqlSyntax syntax = call.getOperator().getSyntax();
switch (syntax) {
case BINARY:
logger.debug("Binary");
final String funcName = call.getOperator().getName().toLowerCase();
List<LogicalExpression> args = Lists.newArrayList();
for(RexNode r : call.getOperands()){
args.add(r.accept(this));
}
if (FunctionCallFactory.isBooleanOperator(funcName)) {
LogicalExpression func = FunctionCallFactory.createBooleanOperator(funcName, args);
return func;
} else {
args = Lists.reverse(args);
LogicalExpression lastArg = args.get(0);
for(int i = 1; i < args.size(); i++){
lastArg = FunctionCallFactory.createExpression(funcName, Lists.newArrayList(args.get(i), lastArg));
}
return lastArg;
}
case FUNCTION:
case FUNCTION_ID:
logger.debug("Function");
return getDrillFunctionFromOptiqCall(call);
case POSTFIX:
logger.debug("Postfix");
switch(call.getKind()){
case IS_NULL:
case IS_TRUE:
case IS_FALSE:
case OTHER:
return FunctionCallFactory.createExpression(call.getOperator().getName().toLowerCase(),
ExpressionPosition.UNKNOWN, call.getOperands().get(0).accept(this));
}
throw new AssertionError("todo: implement syntax " + syntax + "(" + call + ")");
case PREFIX:
logger.debug("Prefix");
LogicalExpression arg = call.getOperands().get(0).accept(this);
switch(call.getKind()){
case NOT:
return FunctionCallFactory.createExpression(call.getOperator().getName().toLowerCase(),
ExpressionPosition.UNKNOWN, arg);
}
throw new AssertionError("todo: implement syntax " + syntax + "(" + call + ")");
case SPECIAL:
logger.debug("Special");
switch(call.getKind()){
case CAST:
return getDrillCastFunctionFromOptiq(call);
case LIKE:
case SIMILAR:
return getDrillFunctionFromOptiqCall(call);
case CASE:
List<LogicalExpression> caseArgs = Lists.newArrayList();
for(RexNode r : call.getOperands()){
caseArgs.add(r.accept(this));
}
caseArgs = Lists.reverse(caseArgs);
              // the number of arguments is always going to be odd, because
              // Optiq adds "null" for the missing else expression at the end
assert caseArgs.size()%2 == 1;
LogicalExpression elseExpression = caseArgs.get(0);
for (int i=1; i<caseArgs.size(); i=i+2) {
elseExpression = IfExpression.newBuilder()
.setElse(elseExpression)
.addCondition(new IfCondition(caseArgs.get(i + 1), caseArgs.get(i))).build();
}
return elseExpression;
}
if (call.getOperator() == SqlStdOperatorTable.ITEM) {
SchemaPath left = (SchemaPath) call.getOperands().get(0).accept(this);
final RexLiteral literal = (RexLiteral) call.getOperands().get(1);
switch(literal.getTypeName()){
case DECIMAL:
case INTEGER:
return left.getChild(((BigDecimal)literal.getValue()).intValue());
case CHAR:
return left.getChild(literal.getValue2().toString());
default:
// fall through
}
}
// fall through
default:
throw new AssertionError("todo: implement syntax " + syntax + "(" + call + ")");
}
}
private LogicalExpression doUnknown(Object o){
logger.warn("Doesn't currently support consumption of {}.", o);
return NullExpression.INSTANCE;
}
@Override
public LogicalExpression visitLocalRef(RexLocalRef localRef) {
return doUnknown(localRef);
}
@Override
public LogicalExpression visitOver(RexOver over) {
return doUnknown(over);
}
@Override
public LogicalExpression visitCorrelVariable(RexCorrelVariable correlVariable) {
return doUnknown(correlVariable);
}
@Override
public LogicalExpression visitDynamicParam(RexDynamicParam dynamicParam) {
return doUnknown(dynamicParam);
}
@Override
public LogicalExpression visitRangeRef(RexRangeRef rangeRef) {
return doUnknown(rangeRef);
}
@Override
public LogicalExpression visitFieldAccess(RexFieldAccess fieldAccess) {
return super.visitFieldAccess(fieldAccess);
}
private LogicalExpression getDrillCastFunctionFromOptiq(RexCall call){
LogicalExpression arg = call.getOperands().get(0).accept(this);
MajorType castType = null;
switch(call.getType().getSqlTypeName().getName()){
case "VARCHAR":
case "CHAR":
castType = Types.required(MinorType.VARCHAR).toBuilder().setWidth(call.getType().getPrecision()).build();
break;
case "INTEGER": castType = Types.required(MinorType.INT); break;
case "FLOAT": castType = Types.required(MinorType.FLOAT4); break;
case "DOUBLE": castType = Types.required(MinorType.FLOAT8); break;
case "DECIMAL":
int precision = call.getType().getPrecision();
int scale = call.getType().getScale();
if (precision <= 9) {
castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL9).setPrecision(precision).setScale(scale).build();
} else if (precision <= 18) {
castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL18).setPrecision(precision).setScale(scale).build();
} else if (precision <= 28) {
// Inject a cast to SPARSE before casting to the dense type.
castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL28SPARSE).setPrecision(precision).setScale(scale).build();
} else if (precision <= 38) {
castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL38SPARSE).setPrecision(precision).setScale(scale).build();
} else {
throw new UnsupportedOperationException("Only Decimal types with precision range 0 - 38 is supported");
}
break;
case "INTERVAL_YEAR_MONTH": castType = Types.required(MinorType.INTERVALYEAR); break;
case "INTERVAL_DAY_TIME": castType = Types.required(MinorType.INTERVALDAY); break;
case "ANY": return arg; // Type will be same as argument.
default: castType = Types.required(MinorType.valueOf(call.getType().getSqlTypeName().getName()));
}
return FunctionCallFactory.createCast(castType, ExpressionPosition.UNKNOWN, arg);
}
private LogicalExpression getDrillFunctionFromOptiqCall(RexCall call) {
List<LogicalExpression> args = Lists.newArrayList();
for(RexNode n : call.getOperands()){
args.add(n.accept(this));
}
String functionName = call.getOperator().getName().toLowerCase();
      // TODO: once we have more function rewrites and a pattern emerges from different rewrites, factor this out in a better fashion
/* Rewrite extract functions in the following manner
* extract(year, date '2008-2-23') ---> extractYear(date '2008-2-23')
*/
if (functionName.equals("extract")) {
// Assert that the first argument to extract is a QuotedString
assert args.get(0) instanceof ValueExpressions.QuotedString;
// Get the unit of time to be extracted
String timeUnitStr = ((ValueExpressions.QuotedString)args.get(0)).value;
switch (timeUnitStr){
case ("YEAR"):
case ("MONTH"):
case ("DAY"):
case ("HOUR"):
case ("MINUTE"):
case ("SECOND"):
String functionPostfix = timeUnitStr.substring(0, 1).toUpperCase() + timeUnitStr.substring(1).toLowerCase();
functionName += functionPostfix;
return FunctionCallFactory.createExpression(functionName, args.subList(1, 2));
default:
throw new UnsupportedOperationException("extract function supports the following time units: YEAR, MONTH, DAY, HOUR, MINUTE, SECOND");
}
} else if (functionName.equals("trim")) {
String trimFunc = null;
List<LogicalExpression> trimArgs = Lists.newArrayList();
assert args.get(0) instanceof ValueExpressions.QuotedString;
switch (((ValueExpressions.QuotedString)args.get(0)).value.toUpperCase()) {
case "LEADING":
trimFunc = "ltrim";
break;
case "TRAILING":
trimFunc = "rtrim";
break;
case "BOTH":
trimFunc = "btrim";
break;
default:
            assert false : "Unsupported trim keyword";
}
trimArgs.add(args.get(2));
trimArgs.add(args.get(1));
return FunctionCallFactory.createExpression(trimFunc, trimArgs);
} else if (functionName.equals("date_part")) {
// Rewrite DATE_PART functions as extract functions
// assert that the function has exactly two arguments
assert args.size() == 2;
/* Based on the first input to the date_part function we rewrite the function as the
* appropriate extract function. For example
* date_part('year', date '2008-2-23') ------> extractYear(date '2008-2-23')
*/
assert args.get(0) instanceof QuotedString;
QuotedString extractString = (QuotedString) args.get(0);
String functionPostfix = extractString.value.substring(0, 1).toUpperCase() + extractString.value.substring(1).toLowerCase();
return FunctionCallFactory.createExpression("extract" + functionPostfix, args.subList(1, 2));
} else if (functionName.equals("concat")) {
      // Fold a variadic concat into a chain of binary concat calls
List<LogicalExpression> concatArgs = Lists.newArrayList();
concatArgs.add(args.get(0));
concatArgs.add(args.get(1));
LogicalExpression first = FunctionCallFactory.createExpression(functionName, concatArgs);
for (int i = 2; i < args.size(); i++) {
concatArgs = Lists.newArrayList();
concatArgs.add(first);
concatArgs.add(args.get(i));
first = FunctionCallFactory.createExpression(functionName, concatArgs);
}
return first;
} else if (functionName.equals("length")) {
if (args.size() == 2) {
// Second argument should always be a literal specifying the encoding format
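        // e.g. length(col, 'UTF8') is rewritten to lengthUtf8(col) by the rename below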
assert args.get(1) instanceof ValueExpressions.QuotedString;
String encodingType = ((ValueExpressions.QuotedString) args.get(1)).value;
functionName += encodingType.substring(0, 1).toUpperCase() + encodingType.substring(1).toLowerCase();
return FunctionCallFactory.createExpression(functionName, args.subList(0, 1));
}
} else if ((functionName.equals("convert_from") || functionName.equals("convert_to"))
&& args.get(1) instanceof QuotedString) {
return FunctionCallFactory.createConvert(functionName, ((QuotedString)args.get(1)).value, args.get(0), ExpressionPosition.UNKNOWN);
} else if ((functionName.equalsIgnoreCase("rpad")) || functionName.equalsIgnoreCase("lpad")) {
      // If rpad/lpad is called with only two arguments, append a default QuotedExpression to be used as the pad string
if (args.size() == 2) {
String spaceFill = " ";
LogicalExpression fill = ValueExpressions.getChar(spaceFill);
args.add(fill);
}
}
return FunctionCallFactory.createExpression(functionName, args);
}
@Override
public LogicalExpression visitLiteral(RexLiteral literal) {
switch(literal.getType().getSqlTypeName()){
case BIGINT:
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.BIGINT);
}
long l = ((BigDecimal) literal.getValue()).longValue();
return ValueExpressions.getBigInt(l);
case BOOLEAN:
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.BIT);
}
return ValueExpressions.getBit(((Boolean) literal.getValue()));
case CHAR:
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.VARCHAR);
}
return ValueExpressions.getChar(((NlsString)literal.getValue()).getValue());
case DOUBLE:
if (isLiteralNull(literal)){
return createNullExpr(MinorType.FLOAT8);
}
double d = ((BigDecimal) literal.getValue()).doubleValue();
return ValueExpressions.getFloat8(d);
case FLOAT:
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.FLOAT4);
}
float f = ((BigDecimal) literal.getValue()).floatValue();
return ValueExpressions.getFloat4(f);
case INTEGER:
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.INT);
}
int a = ((BigDecimal) literal.getValue()).intValue();
return ValueExpressions.getInt(a);
case DECIMAL:
/* TODO: Enable using Decimal literals once we have more functions implemented for Decimal
* For now continue using Double instead of decimals
int precision = ((BigDecimal) literal.getValue()).precision();
if (precision <= 9) {
return ValueExpressions.getDecimal9((BigDecimal)literal.getValue());
} else if (precision <= 18) {
return ValueExpressions.getDecimal18((BigDecimal)literal.getValue());
} else if (precision <= 28) {
return ValueExpressions.getDecimal28((BigDecimal)literal.getValue());
} else if (precision <= 38) {
return ValueExpressions.getDecimal38((BigDecimal)literal.getValue());
} */
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.FLOAT8);
}
double dbl = ((BigDecimal) literal.getValue()).doubleValue();
logger.warn("Converting exact decimal into approximate decimal. Should be fixed once decimal is implemented.");
return ValueExpressions.getFloat8(dbl);
case VARCHAR:
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.VARCHAR);
}
return ValueExpressions.getChar(((NlsString)literal.getValue()).getValue());
case SYMBOL:
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.VARCHAR);
}
return ValueExpressions.getChar(literal.getValue().toString());
case DATE:
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.DATE);
}
return (ValueExpressions.getDate((GregorianCalendar)literal.getValue()));
case TIME:
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.TIME);
}
return (ValueExpressions.getTime((GregorianCalendar)literal.getValue()));
case TIMESTAMP:
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.TIMESTAMP);
}
return (ValueExpressions.getTimeStamp((GregorianCalendar) literal.getValue()));
case INTERVAL_YEAR_MONTH:
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.INTERVALYEAR);
}
return (ValueExpressions.getIntervalYear(((BigDecimal) (literal.getValue())).intValue()));
case INTERVAL_DAY_TIME:
if (isLiteralNull(literal)) {
return createNullExpr(MinorType.INTERVALDAY);
}
return (ValueExpressions.getIntervalDay(((BigDecimal) (literal.getValue())).longValue()));
case NULL:
return NullExpression.INSTANCE;
case ANY:
if (isLiteralNull(literal)) {
return NullExpression.INSTANCE;
}
default:
throw new UnsupportedOperationException(String.format("Unable to convert the value of %s and type %s to a Drill constant expression.", literal, literal.getType().getSqlTypeName()));
}
}
}
private static final TypedNullConstant createNullExpr(MinorType type) {
return new TypedNullConstant(Types.optional(type));
}
private static boolean isLiteralNull(RexLiteral literal) {
return literal.getTypeName().getName().equals("NULL");
}
}
| DRILL-1099: Inject space (' ') as the default second argument to [lbr]trim
* Optiq already handles this for "trim".
| exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java | DRILL-1099: Inject space (' ') as the default second argument to [lbr]trim | <ide><path>xec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java
<ide> trimArgs.add(args.get(1));
<ide>
<ide> return FunctionCallFactory.createExpression(trimFunc, trimArgs);
<add> } else if (functionName.equals("ltrim") || functionName.equals("rtrim") || functionName.equals("btrim")) {
<add> if (args.size() == 1) {
<add> args.add(ValueExpressions.getChar(" "));
<add> }
<add> return FunctionCallFactory.createExpression(functionName, args);
<ide> } else if (functionName.equals("date_part")) {
<ide> // Rewrite DATE_PART functions as extract functions
<ide> // assert that the function has exactly two arguments |
|
Java | apache-2.0 | 14af40ee9c66cd543c5c904be8f072e06abda47b | 0 | openengsb/openengsb,openengsb/openengsb,openengsb/openengsb,openengsb/openengsb,openengsb/openengsb,openengsb/openengsb | /**
* Licensed to the Austrian Association for Software Tool Integration (AASTI)
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. The AASTI licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openengsb.itests.exam;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import java.util.Dictionary;
import java.util.Hashtable;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.openengsb.core.api.AliveState;
import org.openengsb.core.api.context.ContextHolder;
import org.openengsb.core.api.workflow.RuleManager;
import org.openengsb.core.api.workflow.WorkflowService;
import org.openengsb.core.api.workflow.model.RuleBaseElementId;
import org.openengsb.core.api.workflow.model.RuleBaseElementType;
import org.openengsb.core.common.AbstractOpenEngSBService;
import org.openengsb.domain.example.ExampleDomain;
import org.openengsb.domain.example.event.LogEvent;
import org.openengsb.itests.util.AbstractPreConfiguredExamTestHelper;
import org.ops4j.pax.exam.junit.JUnit4TestRunner;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.core.context.SecurityContextHolder;
@RunWith(JUnit4TestRunner.class)
// This one will run each test in its own container (slower speed)
// @ExamReactorStrategy(AllConfinedStagedReactorFactory.class)
public class WorkflowIT extends AbstractPreConfiguredExamTestHelper {
public static class DummyLogDomain extends AbstractOpenEngSBService implements ExampleDomain {
private boolean wasCalled = false;
@Override
public String doSomething(String message) {
wasCalled = true;
return "something";
}
@Override
public AliveState getAliveState() {
return AliveState.OFFLINE;
}
@Override
public String doSomething(ExampleEnum exampleEnum) {
wasCalled = true;
return "something";
}
@Override
public String doSomethingWithLogEvent(LogEvent event) {
wasCalled = true;
return "something";
}
public boolean isWasCalled() {
return wasCalled;
}
}
@Test
public void testCorrectContextHolderStrategy() throws Exception {
assertThat(SecurityContextHolder.getContextHolderStrategy().getClass().getSimpleName(),
is("InheritableThreadLocalSecurityContextHolderStrategy"));
}
@Test
public void testCreateRuleAndTriggerDomain() throws Exception {
DummyLogDomain exampleMock = new DummyLogDomain();
Dictionary<String, Object> properties = new Hashtable<String, Object>();
properties.put("domain", "example");
properties.put("connector", "example");
properties.put("location.foo", "example2");
getBundleContext().registerService(ExampleDomain.class.getName(), exampleMock, properties);
RuleManager ruleManager = getOsgiService(RuleManager.class);
ruleManager.addImport(ExampleDomain.class.getName());
ruleManager.addImport(LogEvent.class.getName());
ruleManager.addGlobal(ExampleDomain.class.getName(), "example2");
ruleManager.add(new RuleBaseElementId(RuleBaseElementType.Rule, "example-trigger"), "" +
"when\n" +
" l : LogEvent()\n" +
"then\n" +
" example2.doSomething(\"42\");\n"
);
ContextHolder.get().setCurrentContextId("foo");
WorkflowService workflowService = getOsgiService(WorkflowService.class);
authenticate("admin", "password");
workflowService.processEvent(new LogEvent());
assertThat(exampleMock.wasCalled, is(true));
}
/**
     * Ignored because the security manager is commented out at the moment.
*/
@Ignore
@Test(expected = AccessDeniedException.class)
public void testUserAccessToRuleManager_shouldThrowException() throws Exception {
authenticate("user", "password");
}
}
| itests/src/test/java/org/openengsb/itests/exam/WorkflowIT.java | /**
* Licensed to the Austrian Association for Software Tool Integration (AASTI)
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. The AASTI licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openengsb.itests.exam;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.openengsb.core.api.AliveState;
import org.openengsb.core.common.AbstractOpenEngSBService;
import org.openengsb.domain.example.ExampleDomain;
import org.openengsb.domain.example.event.LogEvent;
import org.openengsb.itests.util.AbstractPreConfiguredExamTestHelper;
import org.ops4j.pax.exam.junit.JUnit4TestRunner;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.core.context.SecurityContextHolder;
@RunWith(JUnit4TestRunner.class)
// This one will run each test in its own container (slower speed)
// @ExamReactorStrategy(AllConfinedStagedReactorFactory.class)
public class WorkflowIT extends AbstractPreConfiguredExamTestHelper {
public static class DummyLogDomain extends AbstractOpenEngSBService implements ExampleDomain {
private boolean wasCalled = false;
@Override
public String doSomething(String message) {
wasCalled = true;
return "something";
}
@Override
public AliveState getAliveState() {
return AliveState.OFFLINE;
}
@Override
public String doSomething(ExampleEnum exampleEnum) {
wasCalled = true;
return "something";
}
@Override
public String doSomethingWithLogEvent(LogEvent event) {
wasCalled = true;
return "something";
}
public boolean isWasCalled() {
return wasCalled;
}
}
@Test
public void testCorrectContextHolderStrategy() throws Exception {
assertThat(SecurityContextHolder.getContextHolderStrategy().getClass().getSimpleName(),
is("InheritableThreadLocalSecurityContextHolderStrategy"));
}
/**
     * Ignored because the security manager is commented out at the moment.
*/
@Ignore
@Test(expected = AccessDeniedException.class)
public void testUserAccessToRuleManager_shouldThrowException() throws Exception {
authenticate("user", "password");
}
}
| create additional itest for workflow-service
| itests/src/test/java/org/openengsb/itests/exam/WorkflowIT.java | create additional itest for workflow-service | <ide><path>tests/src/test/java/org/openengsb/itests/exam/WorkflowIT.java
<ide> import static org.hamcrest.CoreMatchers.is;
<ide> import static org.junit.Assert.assertThat;
<ide>
<add>import java.util.Dictionary;
<add>import java.util.Hashtable;
<add>
<ide> import org.junit.Ignore;
<ide> import org.junit.Test;
<ide> import org.junit.runner.RunWith;
<ide> import org.openengsb.core.api.AliveState;
<add>import org.openengsb.core.api.context.ContextHolder;
<add>import org.openengsb.core.api.workflow.RuleManager;
<add>import org.openengsb.core.api.workflow.WorkflowService;
<add>import org.openengsb.core.api.workflow.model.RuleBaseElementId;
<add>import org.openengsb.core.api.workflow.model.RuleBaseElementType;
<ide> import org.openengsb.core.common.AbstractOpenEngSBService;
<ide> import org.openengsb.domain.example.ExampleDomain;
<ide> import org.openengsb.domain.example.event.LogEvent;
<ide> is("InheritableThreadLocalSecurityContextHolderStrategy"));
<ide> }
<ide>
<add> @Test
<add> public void testCreateRuleAndTriggerDomain() throws Exception {
<add> DummyLogDomain exampleMock = new DummyLogDomain();
<add> Dictionary<String, Object> properties = new Hashtable<String, Object>();
<add> properties.put("domain", "example");
<add> properties.put("connector", "example");
<add> properties.put("location.foo", "example2");
<add> getBundleContext().registerService(ExampleDomain.class.getName(), exampleMock, properties);
<add>
<add> RuleManager ruleManager = getOsgiService(RuleManager.class);
<add>
<add> ruleManager.addImport(ExampleDomain.class.getName());
<add> ruleManager.addImport(LogEvent.class.getName());
<add>
<add> ruleManager.addGlobal(ExampleDomain.class.getName(), "example2");
<add>
<add> ruleManager.add(new RuleBaseElementId(RuleBaseElementType.Rule, "example-trigger"), "" +
<add> "when\n" +
<add> " l : LogEvent()\n" +
<add> "then\n" +
<add> " example2.doSomething(\"42\");\n"
<add> );
<add>
<add> ContextHolder.get().setCurrentContextId("foo");
<add> WorkflowService workflowService = getOsgiService(WorkflowService.class);
<add>
<add> authenticate("admin", "password");
<add> workflowService.processEvent(new LogEvent());
<add>
<add> assertThat(exampleMock.wasCalled, is(true));
<add> }
<add>
<ide> /**
<ide> * Ignored because security manager is commented in the moment.
<ide> */ |
|
Java | apache-2.0 | c35f28c34fc3528efbc54e6e38e7f8f52d153dc6 | 0 | reactor/reactor-netty,reactor/reactor-netty | /*
* Copyright (c) 2011-2019 Pivotal Software Inc, All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package reactor.netty.resources;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Queue;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelHandler;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.pool.ChannelHealthChecker;
import io.netty.channel.pool.ChannelPool;
import io.netty.channel.pool.ChannelPoolHandler;
import io.netty.util.AttributeKey;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.GenericFutureListener;
import io.netty.util.concurrent.Promise;
import io.netty.util.internal.PlatformDependent;
import reactor.core.Disposable;
import reactor.core.publisher.Mono;
import reactor.core.publisher.MonoProcessor;
import reactor.core.publisher.MonoSink;
import reactor.core.scheduler.Schedulers;
import reactor.netty.Connection;
import reactor.netty.ConnectionObserver;
import reactor.netty.channel.BootstrapHandlers;
import reactor.netty.channel.ChannelOperations;
import reactor.util.Logger;
import reactor.util.Loggers;
import reactor.util.annotation.NonNull;
import reactor.util.concurrent.Queues;
import reactor.util.context.Context;
import static reactor.netty.ReactorNetty.format;
/**
* @author Stephane Maldini
*/
final class PooledConnectionProvider implements ConnectionProvider {
interface PoolFactory {
ChannelPool newPool(Bootstrap b,
ChannelPoolHandler handler,
ChannelHealthChecker checker);
}
final ConcurrentMap<PoolKey, Pool> channelPools;
final String name;
final PoolFactory poolFactory;
final int maxConnections;
PooledConnectionProvider(String name, PoolFactory poolFactory) {
this.name = name;
this.poolFactory = poolFactory;
this.channelPools = PlatformDependent.newConcurrentHashMap();
this.maxConnections = -1;
}
PooledConnectionProvider(String name, PoolFactory poolFactory, int maxConnections) {
this.name = name;
this.poolFactory = poolFactory;
this.channelPools = PlatformDependent.newConcurrentHashMap();
this.maxConnections = maxConnections;
}
@Override
public void disposeWhen(@NonNull SocketAddress address) {
List<Map.Entry<PoolKey, Pool>> toDispose;
toDispose = channelPools.entrySet()
.stream()
.filter(p -> compareAddresses(p.getKey().holder, address))
.collect(Collectors.toList());
toDispose.forEach(e -> {
if (channelPools.remove(e.getKey(), e.getValue())) {
if(log.isDebugEnabled()){
log.debug("Disposing pool for {}", e.getKey().fqdn);
}
e.getValue().pool.close();
}
});
}
private boolean compareAddresses(SocketAddress origin, SocketAddress target) {
if (origin.equals(target)) {
return true;
}
else if (origin instanceof InetSocketAddress &&
target instanceof InetSocketAddress) {
InetSocketAddress isaOrigin = (InetSocketAddress) origin;
InetSocketAddress isaTarget = (InetSocketAddress) target;
InetAddress iaTarget = isaTarget.getAddress();
return iaTarget != null && iaTarget.isAnyLocalAddress() &&
isaOrigin.getPort() == isaTarget.getPort();
}
return false;
}
@Override
public Mono<Connection> acquire(Bootstrap b) {
return Mono.create(sink -> {
Bootstrap bootstrap = b.clone();
ChannelOperations.OnSetup opsFactory =
BootstrapHandlers.channelOperationFactory(bootstrap);
ConnectionObserver obs = BootstrapHandlers.connectionObserver(bootstrap);
NewConnectionProvider.convertLazyRemoteAddress(bootstrap);
ChannelHandler handler = bootstrap.config().handler();
PoolKey holder = new PoolKey(bootstrap.config().remoteAddress(),
handler != null ? handler.hashCode() : -1);
Pool pool = channelPools.computeIfAbsent(holder, poolKey -> {
if (log.isDebugEnabled()) {
log.debug("Creating new client pool [{}] for {}",
name,
bootstrap.config()
.remoteAddress());
}
return new Pool(bootstrap, poolFactory, opsFactory);
});
disposableAcquire(sink, obs, pool, false);
});
}
@Override
public Mono<Void> disposeLater() {
return Mono.<Void>fromRunnable(() -> {
Pool pool;
for (PoolKey key : channelPools.keySet()) {
pool = channelPools.remove(key);
if (pool != null) {
pool.close();
}
}
})
.subscribeOn(Schedulers.elastic());
}
@Override
public boolean isDisposed() {
return channelPools.isEmpty() || channelPools.values()
.stream()
.allMatch(AtomicBoolean::get);
}
@Override
public int maxConnections() {
return maxConnections;
}
@Override
public String toString() {
return "PooledConnectionProvider {" +
"name=" + name +
", poolFactory=" + poolFactory +
'}';
}
@SuppressWarnings("FutureReturnValueIgnored")
static void disposableAcquire(MonoSink<Connection> sink, ConnectionObserver obs, Pool pool, boolean retried) {
Future<Channel> f = pool.acquire();
DisposableAcquire disposableAcquire =
new DisposableAcquire(sink, f, pool, obs, retried);
// Returned value is deliberately ignored
f.addListener(disposableAcquire);
sink.onCancel(disposableAcquire);
}
static final Logger log = Loggers.getLogger(PooledConnectionProvider.class);
static final AttributeKey<ConnectionObserver> OWNER =
AttributeKey.valueOf("connectionOwner");
final static class Pool extends AtomicBoolean
implements ChannelPoolHandler, ChannelPool, ChannelHealthChecker {
final ChannelPool pool;
final EventLoopGroup defaultGroup;
final Bootstrap bootstrap;
final ChannelOperations.OnSetup opsFactory;
final AtomicInteger activeConnections = new AtomicInteger();
final AtomicInteger inactiveConnections = new AtomicInteger();
final Future<Boolean> HEALTHY;
final Future<Boolean> UNHEALTHY;
Pool(Bootstrap bootstrap,
PoolFactory provider,
ChannelOperations.OnSetup opsFactory) {
this.bootstrap = bootstrap;
this.opsFactory = opsFactory;
this.pool = provider.newPool(bootstrap, this, this);
this.defaultGroup = bootstrap.config()
.group();
HEALTHY = defaultGroup.next()
.newSucceededFuture(true);
UNHEALTHY = defaultGroup.next()
.newSucceededFuture(false);
}
@Override
public Future<Boolean> isHealthy(Channel channel) {
return channel.isActive() ? HEALTHY : UNHEALTHY;
}
@Override
public Future<Channel> acquire() {
return acquire(defaultGroup.next()
.newPromise());
}
@Override
public Future<Channel> acquire(Promise<Channel> promise) {
return pool.acquire(promise);
}
@Override
public Future<Void> release(Channel channel) {
return pool.release(channel);
}
@Override
public Future<Void> release(Channel channel, Promise<Void> promise) {
return pool.release(channel, promise);
}
@Override
public void close() {
if (compareAndSet(false, true)) {
pool.close();
}
}
@Override
public void channelReleased(Channel ch) {
activeConnections.decrementAndGet();
inactiveConnections.incrementAndGet();
if (log.isDebugEnabled()) {
log.debug(format(ch, "Channel cleaned, now {} active connections and {} inactive connections"),
activeConnections, inactiveConnections);
}
}
@Override
public void channelAcquired(Channel ch) {
}
@Override
public void channelCreated(Channel ch) {
/*
Sometimes the Channel can be notified as created (by FixedChannelPool) but
it actually fails to connect and the FixedChannelPool will decrement its
active count, same as if it was released. The channel close promise is
still invoked, which can lead to double-decrement and an assertion error.
As such, it is best to only register the close handler on the channel in
`PooledClientContextHandler`.
see https://github.com/reactor/reactor-netty/issues/289
*/
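			// Assumed failure timeline behind the note above (illustration only):
			//   channelCreated()     -> inactiveConnections incremented below
			//   connect fails        -> FixedChannelPool decrements its own count
			//   closeFuture listener -> would decrement once more if registered here,
			// hence the close handler is only registered later, on acquire.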
inactiveConnections.incrementAndGet();
if (log.isDebugEnabled()) {
log.debug(format(ch, "Created new pooled channel, now {} active connections and {} inactive connections"),
activeConnections, inactiveConnections);
}
PooledConnection pooledConnection = new PooledConnection(ch, this);
pooledConnection.bind();
Bootstrap bootstrap = this.bootstrap.clone();
BootstrapHandlers.finalizeHandler(bootstrap, opsFactory, pooledConnection);
ch.pipeline()
.addFirst(bootstrap.config()
.handler());
}
@Override
public String toString() {
return "{ bootstrap=" + bootstrap +
", activeConnections=" + activeConnections +
", inactiveConnections=" + inactiveConnections +
'}';
}
}
final static class PendingConnectionObserver implements ConnectionObserver {
final Queue<Pending> pendingQueue = Queues.<Pending>unbounded(4).get();
@Override
public void onUncaughtException(Connection connection, Throwable error) {
pendingQueue.add(new Pending(connection, error, null));
}
@Override
public void onStateChange(Connection connection, State newState) {
pendingQueue.add(new Pending(connection, null, newState));
}
static class Pending {
final Connection connection;
final Throwable error;
final State state;
Pending(Connection connection, @Nullable Throwable error, @Nullable State state) {
this.connection = connection;
this.error = error;
this.state = state;
}
}
}
final static class PooledConnection implements Connection, ConnectionObserver {
final Channel channel;
final Pool pool;
final MonoProcessor<Void> onTerminate;
PooledConnection(Channel channel, Pool pool) {
this.channel = channel;
this.pool = pool;
this.onTerminate = MonoProcessor.create();
}
ConnectionObserver owner() {
ConnectionObserver obs;
for (;;) {
obs = channel.attr(OWNER)
.get();
if (obs == null) {
obs = new PendingConnectionObserver();
}
else {
return obs;
}
if (channel.attr(OWNER)
.compareAndSet(null, obs)) {
return obs;
}
}
}
@Override
public Mono<Void> onTerminate() {
return onTerminate.or(onDispose());
}
@Override
public Channel channel() {
return channel;
}
@Override
public Context currentContext() {
return owner().currentContext();
}
@Override
public void onUncaughtException(Connection connection, Throwable error) {
owner().onUncaughtException(connection, error);
}
@Override
public void onStateChange(Connection connection, State newState) {
if(log.isDebugEnabled()) {
log.debug(format(connection.channel(), "onStateChange({}, {})"), connection, newState);
}
if (newState == State.DISCONNECTING) {
if (!isPersistent() && channel.isActive()) {
//will be released by closeFuture internals
channel.close();
owner().onStateChange(connection, State.DISCONNECTING);
return;
}
if (!channel.isActive()) {
owner().onStateChange(connection, State.DISCONNECTING);
//will be released by poolResources internals
return;
}
if (log.isDebugEnabled()) {
log.debug(format(connection.channel(), "Releasing channel"));
}
ConnectionObserver obs = channel.attr(OWNER)
.getAndSet(ConnectionObserver.emptyListener());
pool.release(channel)
.addListener(f -> {
if (log.isDebugEnabled() && !f.isSuccess()) {
log.debug("Failed cleaning the channel from pool", f.cause());
}
onTerminate.onComplete();
obs.onStateChange(connection, State.RELEASED);
});
return;
}
owner().onStateChange(connection, newState);
}
@Override
public String toString() {
return "PooledConnection{" + "channel=" + channel + '}';
}
}
final static class DisposableAcquire
implements Disposable, GenericFutureListener<Future<Channel>>,
ConnectionObserver , Runnable {
final Future<Channel> f;
final MonoSink<Connection> sink;
final Pool pool;
final ConnectionObserver obs;
final boolean retried;
DisposableAcquire(MonoSink<Connection> sink,
Future<Channel> future,
Pool pool,
ConnectionObserver obs,
boolean retried) {
this.f = future;
this.pool = pool;
this.sink = sink;
this.obs = obs;
this.retried = retried;
}
@Override
public final void dispose() {
if (isDisposed()) {
return;
}
// Returned value is deliberately ignored
f.removeListener(this);
if (!f.isDone()) {
f.cancel(true);
}
}
@Override
public Context currentContext() {
return sink.currentContext();
}
@Override
public void onUncaughtException(Connection connection, Throwable error) {
sink.error(error);
obs.onUncaughtException(connection, error);
}
@Override
public void onStateChange(Connection connection, State newState) {
if (newState == State.CONFIGURED) {
sink.success(connection);
}
obs.onStateChange(connection, newState);
}
@Override
public boolean isDisposed() {
return f.isCancelled() || f.isDone();
}
@Override
public void run() {
Channel c = f.getNow();
pool.activeConnections.incrementAndGet();
pool.inactiveConnections.decrementAndGet();
ConnectionObserver current = c.attr(OWNER)
.getAndSet(this);
if (current instanceof PendingConnectionObserver) {
PendingConnectionObserver pending = (PendingConnectionObserver)current;
PendingConnectionObserver.Pending p;
current = null;
registerClose(c, pool);
while((p = pending.pendingQueue.poll()) != null) {
if (p.error != null) {
onUncaughtException(p.connection, p.error);
}
else if (p.state != null) {
onStateChange(p.connection, p.state);
}
}
}
else if (current == null) {
registerClose(c, pool);
}
if (current != null) {
Connection conn = Connection.from(c);
if (log.isDebugEnabled()) {
log.debug(format(c, "Channel acquired, now {} active connections and {} inactive connections"),
pool.activeConnections, pool.inactiveConnections);
}
obs.onStateChange(conn, State.ACQUIRED);
PooledConnection con = conn.as(PooledConnection.class);
if (con != null) {
ChannelOperations<?, ?> ops = pool.opsFactory.create(con, con, null);
if (ops != null) {
ops.bind();
obs.onStateChange(ops, State.CONFIGURED);
sink.success(ops);
}
else {
//already configured, just forward the connection
sink.success(con);
}
}
else {
//already bound, just forward the connection
sink.success(conn);
}
return;
}
			// Connected; let onStateChange forward the event when an ops factory is present
if (log.isDebugEnabled()) {
log.debug(format(c, "Channel connected, now {} active " +
"connections and {} inactive connections"),
pool.activeConnections, pool.inactiveConnections);
}
if (pool.opsFactory == ChannelOperations.OnSetup.empty()) {
sink.success(Connection.from(c));
}
}
		// If the close lambda referred to the instance field `pool`, it would hold an
		// implicit reference to `DisposableAcquire.this`, keeping this whole object
		// reachable and preventing GC from reclaiming it.
		// Taking Pool as a method parameter avoids that capture.
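		// Illustration (comments only): the two capture shapes.
		//   c.closeFuture().addListener(ff -> this.pool.release(c)); // field access captures DisposableAcquire.this
		//   c.closeFuture().addListener(ff -> pool.release(c));      // parameter access captures only the Pool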
void registerClose(Channel c, Pool pool) {
if (log.isDebugEnabled()) {
log.debug(format(c, "Registering pool release on close event for channel"));
}
c.closeFuture()
.addListener(ff -> {
if (AttributeKey.exists("channelPool")) {
pool.release(c);
}
pool.inactiveConnections.decrementAndGet();
if (log.isDebugEnabled()) {
log.debug(format(c, "Channel closed, now {} active connections and {} inactive connections"),
pool.activeConnections, pool.inactiveConnections);
}
});
}
@Override
public final void operationComplete(Future<Channel> f) throws Exception {
if (!f.isSuccess()) {
if (f.isCancelled()) {
pool.inactiveConnections.decrementAndGet();
if (log.isDebugEnabled()) {
log.debug("Cancelled acquiring from pool {}", pool);
}
return;
}
Throwable cause = f.cause();
if (cause != null) {
if (!(cause instanceof TimeoutException) && !(cause instanceof IllegalStateException)) {
pool.inactiveConnections.decrementAndGet();
}
sink.error(f.cause());
}
else {
pool.inactiveConnections.decrementAndGet();
sink.error(new IOException("Error while acquiring from " + pool));
}
}
else {
Channel c = f.get();
if (!c.isActive()) {
registerClose(c, pool);
if (!retried) {
if (log.isDebugEnabled()) {
log.debug(format(c, "Immediately aborted pooled channel, re-acquiring new channel"));
}
disposableAcquire(sink, obs, pool, true);
}
else {
Throwable cause = f.cause();
if (cause != null) {
sink.error(cause);
}
else {
sink.error(new IOException("Error while acquiring from " + pool));
}
}
}
if (c.eventLoop().inEventLoop()) {
run();
}
else {
c.eventLoop()
.execute(this);
}
}
}
}
final static class PoolKey {
final SocketAddress holder;
final int pipelineKey;
final String fqdn;
PoolKey(SocketAddress holder, int pipelineKey) {
this.holder = holder;
this.fqdn = holder instanceof InetSocketAddress ? holder.toString() : null;
this.pipelineKey = pipelineKey;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
PoolKey poolKey = (PoolKey) o;
return pipelineKey == poolKey.pipelineKey &&
Objects.equals(holder, poolKey.holder) &&
Objects.equals(fqdn, poolKey.fqdn);
}
@Override
public int hashCode() {
return Objects.hash(holder, pipelineKey, fqdn);
}
}
}
| src/main/java/reactor/netty/resources/PooledConnectionProvider.java | /*
* Copyright (c) 2011-2019 Pivotal Software Inc, All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package reactor.netty.resources;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Queue;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelHandler;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.pool.ChannelHealthChecker;
import io.netty.channel.pool.ChannelPool;
import io.netty.channel.pool.ChannelPoolHandler;
import io.netty.util.AttributeKey;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.GenericFutureListener;
import io.netty.util.concurrent.Promise;
import io.netty.util.internal.PlatformDependent;
import reactor.core.Disposable;
import reactor.core.publisher.Mono;
import reactor.core.publisher.MonoProcessor;
import reactor.core.publisher.MonoSink;
import reactor.core.scheduler.Schedulers;
import reactor.netty.Connection;
import reactor.netty.ConnectionObserver;
import reactor.netty.channel.BootstrapHandlers;
import reactor.netty.channel.ChannelOperations;
import reactor.util.Logger;
import reactor.util.Loggers;
import reactor.util.annotation.NonNull;
import reactor.util.concurrent.Queues;
import reactor.util.context.Context;
import static reactor.netty.ReactorNetty.format;
/**
* @author Stephane Maldini
*/
final class PooledConnectionProvider implements ConnectionProvider {
interface PoolFactory {
ChannelPool newPool(Bootstrap b,
ChannelPoolHandler handler,
ChannelHealthChecker checker);
}
final ConcurrentMap<PoolKey, Pool> channelPools;
final String name;
final PoolFactory poolFactory;
final int maxConnections;
PooledConnectionProvider(String name, PoolFactory poolFactory) {
this.name = name;
this.poolFactory = poolFactory;
this.channelPools = PlatformDependent.newConcurrentHashMap();
this.maxConnections = -1;
}
PooledConnectionProvider(String name, PoolFactory poolFactory, int maxConnections) {
this.name = name;
this.poolFactory = poolFactory;
this.channelPools = PlatformDependent.newConcurrentHashMap();
this.maxConnections = maxConnections;
}
@Override
public void disposeWhen(@NonNull SocketAddress address) {
List<Map.Entry<PoolKey, Pool>> toDispose;
toDispose = channelPools.entrySet()
.stream()
.filter(p -> compareAddresses(p.getKey().holder, address))
.collect(Collectors.toList());
toDispose.forEach(e -> {
if (channelPools.remove(e.getKey(), e.getValue())) {
if(log.isDebugEnabled()){
log.debug("Disposing pool for {}", e.getKey().fqdn);
}
e.getValue().pool.close();
}
});
}
private boolean compareAddresses(SocketAddress origin, SocketAddress target) {
if (origin.equals(target)) {
return true;
}
else if (origin instanceof InetSocketAddress &&
target instanceof InetSocketAddress) {
InetSocketAddress isaOrigin = (InetSocketAddress) origin;
InetSocketAddress isaTarget = (InetSocketAddress) target;
InetAddress iaTarget = isaTarget.getAddress();
return iaTarget != null && iaTarget.isAnyLocalAddress() &&
isaOrigin.getPort() == isaTarget.getPort();
}
return false;
}
@Override
public Mono<Connection> acquire(Bootstrap b) {
return Mono.create(sink -> {
Bootstrap bootstrap = b.clone();
ChannelOperations.OnSetup opsFactory =
BootstrapHandlers.channelOperationFactory(bootstrap);
ConnectionObserver obs = BootstrapHandlers.connectionObserver(bootstrap);
NewConnectionProvider.convertLazyRemoteAddress(bootstrap);
ChannelHandler handler = bootstrap.config().handler();
PoolKey holder = new PoolKey(bootstrap.config().remoteAddress(),
handler != null ? handler.hashCode() : -1);
Pool pool;
for (; ; ) {
pool = channelPools.get(holder);
if (pool != null) {
break;
}
pool = new Pool(bootstrap, poolFactory, opsFactory);
if (channelPools.putIfAbsent(holder, pool) == null) {
if (log.isDebugEnabled()) {
log.debug("Creating new client pool [{}] for {}",
name,
bootstrap.config()
.remoteAddress());
}
break;
}
pool.close();
}
disposableAcquire(sink, obs, pool, false);
});
}
@Override
public Mono<Void> disposeLater() {
return Mono.<Void>fromRunnable(() -> {
Pool pool;
for (PoolKey key : channelPools.keySet()) {
pool = channelPools.remove(key);
if (pool != null) {
pool.close();
}
}
})
.subscribeOn(Schedulers.elastic());
}
@Override
public boolean isDisposed() {
return channelPools.isEmpty() || channelPools.values()
.stream()
.allMatch(AtomicBoolean::get);
}
@Override
public int maxConnections() {
return maxConnections;
}
@Override
public String toString() {
return "PooledConnectionProvider {" +
"name=" + name +
", poolFactory=" + poolFactory +
'}';
}
@SuppressWarnings("FutureReturnValueIgnored")
static void disposableAcquire(MonoSink<Connection> sink, ConnectionObserver obs, Pool pool, boolean retried) {
Future<Channel> f = pool.acquire();
DisposableAcquire disposableAcquire =
new DisposableAcquire(sink, f, pool, obs, retried);
// Returned value is deliberately ignored
f.addListener(disposableAcquire);
sink.onCancel(disposableAcquire);
}
static final Logger log = Loggers.getLogger(PooledConnectionProvider.class);
static final AttributeKey<ConnectionObserver> OWNER =
AttributeKey.valueOf("connectionOwner");
final static class Pool extends AtomicBoolean
implements ChannelPoolHandler, ChannelPool, ChannelHealthChecker {
final ChannelPool pool;
final EventLoopGroup defaultGroup;
final Bootstrap bootstrap;
final ChannelOperations.OnSetup opsFactory;
final AtomicInteger activeConnections = new AtomicInteger();
final AtomicInteger inactiveConnections = new AtomicInteger();
final Future<Boolean> HEALTHY;
final Future<Boolean> UNHEALTHY;
Pool(Bootstrap bootstrap,
PoolFactory provider,
ChannelOperations.OnSetup opsFactory) {
this.bootstrap = bootstrap;
this.opsFactory = opsFactory;
this.pool = provider.newPool(bootstrap, this, this);
this.defaultGroup = bootstrap.config()
.group();
HEALTHY = defaultGroup.next()
.newSucceededFuture(true);
UNHEALTHY = defaultGroup.next()
.newSucceededFuture(false);
}
@Override
public Future<Boolean> isHealthy(Channel channel) {
return channel.isActive() ? HEALTHY : UNHEALTHY;
}
@Override
public Future<Channel> acquire() {
return acquire(defaultGroup.next()
.newPromise());
}
@Override
public Future<Channel> acquire(Promise<Channel> promise) {
return pool.acquire(promise);
}
@Override
public Future<Void> release(Channel channel) {
return pool.release(channel);
}
@Override
public Future<Void> release(Channel channel, Promise<Void> promise) {
return pool.release(channel, promise);
}
@Override
public void close() {
if (compareAndSet(false, true)) {
pool.close();
}
}
@Override
public void channelReleased(Channel ch) {
activeConnections.decrementAndGet();
inactiveConnections.incrementAndGet();
if (log.isDebugEnabled()) {
log.debug(format(ch, "Channel cleaned, now {} active connections and {} inactive connections"),
activeConnections, inactiveConnections);
}
}
@Override
public void channelAcquired(Channel ch) {
}
@Override
public void channelCreated(Channel ch) {
/*
Sometimes the Channel can be notified as created (by FixedChannelPool) but
it actually fails to connect and the FixedChannelPool will decrement its
active count, same as if it was released. The channel close promise is
still invoked, which can lead to double-decrement and an assertion error.
As such, it is best to only register the close handler on the channel in
`PooledClientContextHandler`.
see https://github.com/reactor/reactor-netty/issues/289
*/
inactiveConnections.incrementAndGet();
if (log.isDebugEnabled()) {
log.debug(format(ch, "Created new pooled channel, now {} active connections and {} inactive connections"),
activeConnections, inactiveConnections);
}
PooledConnection pooledConnection = new PooledConnection(ch, this);
pooledConnection.bind();
Bootstrap bootstrap = this.bootstrap.clone();
BootstrapHandlers.finalizeHandler(bootstrap, opsFactory, pooledConnection);
ch.pipeline()
.addFirst(bootstrap.config()
.handler());
}
@Override
public String toString() {
return "{ bootstrap=" + bootstrap +
", activeConnections=" + activeConnections +
", inactiveConnections=" + inactiveConnections +
'}';
}
}
final static class PendingConnectionObserver implements ConnectionObserver {
final Queue<Pending> pendingQueue = Queues.<Pending>unbounded(4).get();
@Override
public void onUncaughtException(Connection connection, Throwable error) {
pendingQueue.add(new Pending(connection, error, null));
}
@Override
public void onStateChange(Connection connection, State newState) {
pendingQueue.add(new Pending(connection, null, newState));
}
static class Pending {
final Connection connection;
final Throwable error;
final State state;
Pending(Connection connection, @Nullable Throwable error, @Nullable State state) {
this.connection = connection;
this.error = error;
this.state = state;
}
}
}
final static class PooledConnection implements Connection, ConnectionObserver {
final Channel channel;
final Pool pool;
final MonoProcessor<Void> onTerminate;
PooledConnection(Channel channel, Pool pool) {
this.channel = channel;
this.pool = pool;
this.onTerminate = MonoProcessor.create();
}
ConnectionObserver owner() {
ConnectionObserver obs;
for (;;) {
obs = channel.attr(OWNER)
.get();
if (obs == null) {
obs = new PendingConnectionObserver();
}
else {
return obs;
}
if (channel.attr(OWNER)
.compareAndSet(null, obs)) {
return obs;
}
}
}
@Override
public Mono<Void> onTerminate() {
return onTerminate.or(onDispose());
}
@Override
public Channel channel() {
return channel;
}
@Override
public Context currentContext() {
return owner().currentContext();
}
@Override
public void onUncaughtException(Connection connection, Throwable error) {
owner().onUncaughtException(connection, error);
}
@Override
public void onStateChange(Connection connection, State newState) {
if(log.isDebugEnabled()) {
log.debug(format(connection.channel(), "onStateChange({}, {})"), connection, newState);
}
if (newState == State.DISCONNECTING) {
if (!isPersistent() && channel.isActive()) {
//will be released by closeFuture internals
channel.close();
owner().onStateChange(connection, State.DISCONNECTING);
return;
}
if (!channel.isActive()) {
owner().onStateChange(connection, State.DISCONNECTING);
//will be released by poolResources internals
return;
}
if (log.isDebugEnabled()) {
log.debug(format(connection.channel(), "Releasing channel"));
}
ConnectionObserver obs = channel.attr(OWNER)
.getAndSet(ConnectionObserver.emptyListener());
pool.release(channel)
.addListener(f -> {
if (log.isDebugEnabled() && !f.isSuccess()) {
log.debug("Failed cleaning the channel from pool", f.cause());
}
onTerminate.onComplete();
obs.onStateChange(connection, State.RELEASED);
});
return;
}
owner().onStateChange(connection, newState);
}
@Override
public String toString() {
return "PooledConnection{" + "channel=" + channel + '}';
}
}
final static class DisposableAcquire
implements Disposable, GenericFutureListener<Future<Channel>>,
ConnectionObserver , Runnable {
final Future<Channel> f;
final MonoSink<Connection> sink;
final Pool pool;
final ConnectionObserver obs;
final boolean retried;
DisposableAcquire(MonoSink<Connection> sink,
Future<Channel> future,
Pool pool,
ConnectionObserver obs,
boolean retried) {
this.f = future;
this.pool = pool;
this.sink = sink;
this.obs = obs;
this.retried = retried;
}
@Override
public final void dispose() {
if (isDisposed()) {
return;
}
// Returned value is deliberately ignored
f.removeListener(this);
if (!f.isDone()) {
f.cancel(true);
}
}
@Override
public Context currentContext() {
return sink.currentContext();
}
@Override
public void onUncaughtException(Connection connection, Throwable error) {
sink.error(error);
obs.onUncaughtException(connection, error);
}
@Override
public void onStateChange(Connection connection, State newState) {
if (newState == State.CONFIGURED) {
sink.success(connection);
}
obs.onStateChange(connection, newState);
}
@Override
public boolean isDisposed() {
return f.isCancelled() || f.isDone();
}
@Override
public void run() {
Channel c = f.getNow();
pool.activeConnections.incrementAndGet();
pool.inactiveConnections.decrementAndGet();
ConnectionObserver current = c.attr(OWNER)
.getAndSet(this);
if (current instanceof PendingConnectionObserver) {
PendingConnectionObserver pending = (PendingConnectionObserver)current;
PendingConnectionObserver.Pending p;
current = null;
registerClose(c, pool);
while((p = pending.pendingQueue.poll()) != null) {
if (p.error != null) {
onUncaughtException(p.connection, p.error);
}
else if (p.state != null) {
onStateChange(p.connection, p.state);
}
}
}
else if (current == null) {
registerClose(c, pool);
}
if (current != null) {
Connection conn = Connection.from(c);
if (log.isDebugEnabled()) {
log.debug(format(c, "Channel acquired, now {} active connections and {} inactive connections"),
pool.activeConnections, pool.inactiveConnections);
}
obs.onStateChange(conn, State.ACQUIRED);
PooledConnection con = conn.as(PooledConnection.class);
if (con != null) {
ChannelOperations<?, ?> ops = pool.opsFactory.create(con, con, null);
if (ops != null) {
ops.bind();
obs.onStateChange(ops, State.CONFIGURED);
sink.success(ops);
}
else {
//already configured, just forward the connection
sink.success(con);
}
}
else {
//already bound, just forward the connection
sink.success(conn);
}
return;
}
			// Connected; let onStateChange forward the event when an ops factory is present
if (log.isDebugEnabled()) {
log.debug(format(c, "Channel connected, now {} active " +
"connections and {} inactive connections"),
pool.activeConnections, pool.inactiveConnections);
}
if (pool.opsFactory == ChannelOperations.OnSetup.empty()) {
sink.success(Connection.from(c));
}
}
		// If the close lambda referred to the instance field `pool`, it would hold an
		// implicit reference to `DisposableAcquire.this`, keeping this whole object
		// reachable and preventing GC from reclaiming it.
		// Taking Pool as a method parameter avoids that capture.
void registerClose(Channel c, Pool pool) {
if (log.isDebugEnabled()) {
log.debug(format(c, "Registering pool release on close event for channel"));
}
c.closeFuture()
.addListener(ff -> {
if (AttributeKey.exists("channelPool")) {
pool.release(c);
}
pool.inactiveConnections.decrementAndGet();
if (log.isDebugEnabled()) {
log.debug(format(c, "Channel closed, now {} active connections and {} inactive connections"),
pool.activeConnections, pool.inactiveConnections);
}
});
}
@Override
public final void operationComplete(Future<Channel> f) throws Exception {
if (!f.isSuccess()) {
if (f.isCancelled()) {
pool.inactiveConnections.decrementAndGet();
if (log.isDebugEnabled()) {
log.debug("Cancelled acquiring from pool {}", pool);
}
return;
}
Throwable cause = f.cause();
if (cause != null) {
if (!(cause instanceof TimeoutException) && !(cause instanceof IllegalStateException)) {
pool.inactiveConnections.decrementAndGet();
}
sink.error(f.cause());
}
else {
pool.inactiveConnections.decrementAndGet();
sink.error(new IOException("Error while acquiring from " + pool));
}
}
else {
Channel c = f.get();
if (!c.isActive()) {
registerClose(c, pool);
if (!retried) {
if (log.isDebugEnabled()) {
log.debug(format(c, "Immediately aborted pooled channel, re-acquiring new channel"));
}
disposableAcquire(sink, obs, pool, true);
}
else {
Throwable cause = f.cause();
if (cause != null) {
sink.error(cause);
}
else {
sink.error(new IOException("Error while acquiring from " + pool));
}
}
}
if (c.eventLoop().inEventLoop()) {
run();
}
else {
c.eventLoop()
.execute(this);
}
}
}
}
final static class PoolKey {
final SocketAddress holder;
final int pipelineKey;
final String fqdn;
PoolKey(SocketAddress holder, int pipelineKey) {
this.holder = holder;
this.fqdn = holder instanceof InetSocketAddress ? holder.toString() : null;
this.pipelineKey = pipelineKey;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
PoolKey poolKey = (PoolKey) o;
return pipelineKey == poolKey.pipelineKey &&
Objects.equals(holder, poolKey.holder) &&
Objects.equals(fqdn, poolKey.fqdn);
}
@Override
public int hashCode() {
return Objects.hash(holder, pipelineKey, fqdn);
}
}
}
| Avoid calling `Pool.close()` which will block reactor thread.
| src/main/java/reactor/netty/resources/PooledConnectionProvider.java | Avoid calling `Pool.close()` which will block reactor thread. | <ide><path>rc/main/java/reactor/netty/resources/PooledConnectionProvider.java
<ide> PoolKey holder = new PoolKey(bootstrap.config().remoteAddress(),
<ide> handler != null ? handler.hashCode() : -1);
<ide>
<del> Pool pool;
<del> for (; ; ) {
<del> pool = channelPools.get(holder);
<del> if (pool != null) {
<del> break;
<del> }
<del> pool = new Pool(bootstrap, poolFactory, opsFactory);
<del> if (channelPools.putIfAbsent(holder, pool) == null) {
<del> if (log.isDebugEnabled()) {
<del> log.debug("Creating new client pool [{}] for {}",
<del> name,
<del> bootstrap.config()
<del> .remoteAddress());
<del> }
<del> break;
<del> }
<del> pool.close();
<del> }
<add> Pool pool = channelPools.computeIfAbsent(holder, poolKey -> {
<add> if (log.isDebugEnabled()) {
<add> log.debug("Creating new client pool [{}] for {}",
<add> name,
<add> bootstrap.config()
<add> .remoteAddress());
<add> }
<add> return new Pool(bootstrap, poolFactory, opsFactory);
<add> });
<ide>
<ide> disposableAcquire(sink, obs, pool, false);
<ide> |
|
Java | apache-2.0 | a94b313f0ef3c59dd2765d908e3f93e761ed464c | 0 | NyBatis/NyBatisCore | package org.nybatis.core.executor;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import org.nybatis.core.conf.Const;
import org.nybatis.core.exception.unchecked.CommandLineException;
import org.nybatis.core.log.NLogger;
import org.nybatis.core.worker.WorkerReadLine;
/**
* OS command line executor
*
* @author [email protected]
*/
public class CommandExecutor {
private Process process = null;
private BufferedWriter processPipe = null;
private ProcessOutputThread outputThread = null;
private ProcessOutputThread errorThread = null;
/**
	 * Executes the given command.
	 *
	 * @param commandLine command line to execute
* @return self instance
*/
public CommandExecutor run( String commandLine ) {
return run( commandLine, null, null );
}
/**
	 * Executes the given command.
	 *
	 * @param commandLine command line to execute
	 * @param worker worker that handles each line of the output
* @return self instance
*/
public CommandExecutor run( String commandLine, WorkerReadLine worker ) {
return run( commandLine, null, worker );
}
/**
	 * Executes the given command.
	 *
	 * @param commandLine command line to execute
	 * @param outputMessage buffer that collects the messages printed by the process
* @return self instance
*/
public CommandExecutor run( String commandLine, StringBuffer outputMessage ) {
return run( commandLine, outputMessage, null );
}
/**
	 * Executes the given command.
	 *
	 * @param commandLine command line to execute
	 * @param outputMessage buffer that collects the messages printed by the process
	 * @param worker worker that handles each line of the output
* @return self instance
*/
public CommandExecutor run( String commandLine, StringBuffer outputMessage, WorkerReadLine worker ) {
Command command = new Command();
command.set( commandLine );
command.setOutputPipe( outputMessage );
command.setWorker( worker );
return run( command );
}
/**
	 * Executes the given command.
	 *
	 * @param command command to execute
* @return self instance
*/
public CommandExecutor run( Command command ) {
if( command == null ) return this;
if( isAlive() ) throw new CommandLineException( "pre-command is still running" );
if( ! command.hasCommand() ) throw new CommandLineException( "there is no command to execute" );
NLogger.debug( "Command Line : {}", command );
try {
ProcessBuilder builder = new ProcessBuilder( command.get() );
if( command.getWorkingDirectory() != null ) {
builder.directory( command.getWorkingDirectory() );
}
process = builder.start();
errorThread = new ProcessOutputThread( process.getErrorStream(), command.getErrorPipe(), command.getWorker() );
errorThread.setDaemon( true );
outputThread = new ProcessOutputThread( process.getInputStream(), command.getOutputPipe(), command.getWorker() );
outputThread.setDaemon( true );
errorThread.start();
outputThread.start();
processPipe = new BufferedWriter( new OutputStreamWriter( process.getOutputStream(), Const.platform.osCharset ) );
return this;
} catch ( IOException e ) {
throw new CommandLineException( e, "It happens ERROR while executing command ({})", command );
}
}
/**
* check whether process is executing or not.
*
* @return true if process is executing.
*/
public boolean isAlive() {
if( process != null && process.isAlive() ) return true;
if( outputThread != null && outputThread.isAlive() ) return true;
return errorThread != null && errorThread.isAlive();
}
/**
* get process termination code
*
* @return the exit value of the subprocess represented by this
* {@code Process} object. By convention, the value
* {@code 0} indicates normal termination.
* @throws IllegalThreadStateException if the subprocess represented
* by this {@code Process} object has not yet terminated
*/
public int getExitValue() {
if( process == null ) throw new IllegalThreadStateException( "process is null." );
return process.exitValue();
}
/**
* wait until process is closed.
*
	 * @param timeout max wait time (milliseconds)
* @return process termination code ( 0 : success )
*/
public int waitFor( Integer timeout ) {
if( ! isAlive() ) return 0;
int exitValue = 0;
try {
exitValue = process.waitFor();
} catch ( InterruptedException e ) {
process.destroy();
} finally {
Thread.interrupted();
}
waitThread( outputThread, timeout );
waitThread( errorThread, timeout );
destroy();
return exitValue;
}
private void waitThread( ProcessOutputThread thread, Integer timeOut ) {
if( thread == null || ! thread.isAlive() ) return;
try {
if( timeOut == null ) {
thread.join();
} else {
thread.join( timeOut );
}
} catch( InterruptedException e ) {
thread.interrupt();
}
}
/**
* wait until process is closed.
*
* @return process termination code ( 0 : success )
*/
public int waitFor() {
return waitFor( null );
}
/**
* terminate process forcibly.
*/
public void destroy() {
if( process != null ) {
process.destroyForcibly();
process = null;
}
destroyThread( outputThread );
destroyThread( errorThread );
if( processPipe != null ) {
try {
processPipe.close();
} catch( IOException e ) {
NLogger.error( e );
} finally {
processPipe = null;
}
}
}
private void destroyThread( ProcessOutputThread thread ) {
if( thread == null ) return;
thread.interrupt();
}
/**
	 * Sends a command to the running process.
	 *
	 * @param command command to send
* @return self instance
*/
public CommandExecutor sendCommand( String command ) {
if( processPipe == null ) return this;
NLogger.debug( "command to send : {}", command );
try {
processPipe.write( command );
processPipe.write( "\n" );
processPipe.flush();
} catch( IOException e ) {
NLogger.error( e );
}
return this;
}
}
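A hedged usage sketch for the executor above (not part of the sources); the command line and timeout values are illustrative only:

CommandExecutor executor = new CommandExecutor();
StringBuffer output = new StringBuffer();
executor.run( "ping -n 1 localhost", output ); // collect process output into the buffer
int exitValue = executor.waitFor( 5000 );      // wait up to 5 seconds
NLogger.debug( "exit value : {}", exitValue );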
| src/main/java/org/nybatis/core/executor/CommandExecutor.java | package org.nybatis.core.executor;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import org.nybatis.core.conf.Const;
import org.nybatis.core.exception.unchecked.CommandLineException;
import org.nybatis.core.log.NLogger;
import org.nybatis.core.worker.WorkerReadLine;
/**
* OS command line executor
*
* @author [email protected]
*/
public class CommandExecutor {
private Process process = null;
private BufferedWriter processPipe = null;
private ProcessOutputThread outputThread = null;
private ProcessOutputThread errorThread = null;
/**
* Executes the given command line.
*
* @param commandLine command line to execute
* @return self instance
*/
public CommandExecutor run( String commandLine ) {
return run( commandLine, null, null );
}
/**
* Executes the given command line.
*
* @param commandLine command line to execute
* @param worker      worker that handles each line of the process output
* @return self instance
*/
public CommandExecutor run( String commandLine, WorkerReadLine worker ) {
return run( commandLine, null, worker );
}
/**
* Executes the given command line.
*
* @param commandLine   command line to execute
* @param outputMessage buffer that collects the output printed by the process
* @return self instance
*/
public CommandExecutor run( String commandLine, StringBuffer outputMessage ) {
return run( commandLine, outputMessage, null );
}
/**
* Executes the given command line.
*
* @param commandLine   command line to execute
* @param outputMessage buffer that collects the output printed by the process
* @param worker        worker that handles each line of the process output
* @return self instance
*/
public CommandExecutor run( String commandLine, StringBuffer outputMessage, WorkerReadLine worker ) {
Command command = new Command();
command.set( commandLine );
command.setOutputPipe( outputMessage );
command.setWorker( worker );
return run( command );
}
/**
* Executes the given command.
*
* @param command command to execute
* @return self instance
*/
public CommandExecutor run( Command command ) {
if( command == null ) return this;
if( isAlive() ) throw new CommandLineException( "pre-command is still running" );
if( ! command.hasCommand() ) throw new CommandLineException( "there is no command to execute" );
NLogger.debug( "Command Line : {}", command );
try {
ProcessBuilder builder = new ProcessBuilder( command.get() );
if( command.getWorkingDirectory() != null ) {
builder.directory( command.getWorkingDirectory() );
}
process = builder.start();
errorThread = new ProcessOutputThread( process.getErrorStream(), command.getErrorPipe(), command.getWorker() );
errorThread.setDaemon( true );
outputThread = new ProcessOutputThread( process.getInputStream(), command.getOutputPipe(), command.getWorker() );
outputThread.setDaemon( true );
errorThread.start();
outputThread.start();
processPipe = new BufferedWriter( new OutputStreamWriter( process.getOutputStream(), Const.platform.osCharset ) );
return this;
} catch ( IOException e ) {
throw new CommandLineException( e, "It happens ERROR while executing command ({})", command );
}
}
/**
* 실행중인지 여부를 확인한다.
*
* @return 실행중인지 여부
*/
public boolean isAlive() {
if( process != null && process.isAlive() ) return true;
if( outputThread != null && outputThread.isAlive() ) return true;
return errorThread != null && errorThread.isAlive();
}
/**
* get process termination code
*
* @return the exit value of the subprocess represented by this
* {@code Process} object. By convention, the value
* {@code 0} indicates normal termination.
* @throws IllegalThreadStateException if the subprocess represented
* by this {@code Process} object has not yet terminated
*/
public int getExitValue() {
if( process == null ) throw new IllegalThreadStateException( "process is null." );
return process.exitValue();
}
/**
* wait until process is closed.
*
* @param timeout max wait time (milliseconds)
* @return process termination code ( 0 : success )
*/
public int waitFor( Long timeout ) {
if( ! isAlive() ) return 0;
int exitValue = 0;
try {
exitValue = process.waitFor();
} catch ( InterruptedException e ) {
process.destroy();
} finally {
Thread.interrupted();
}
waitThread( outputThread, timeout );
waitThread( errorThread, timeout );
destroy();
return exitValue;
}
private void waitThread( ProcessOutputThread thread, Long timeOut ) {
if( thread == null || ! thread.isAlive() ) return;
try {
if( timeOut == null ) {
thread.join();
} else {
thread.join( timeOut );
}
} catch( InterruptedException e ) {
thread.interrupt();
}
}
/**
* wait until process is closed.
*
* @return process termination code ( 0 : success )
*/
public int waitFor() {
return waitFor( null );
}
/**
* terminate process forcibly.
*/
public void destroy() {
if( process != null ) {
process.destroyForcibly();
process = null;
}
destroyThread( outputThread );
destroyThread( errorThread );
if( processPipe != null ) {
try {
processPipe.close();
} catch( IOException e ) {
NLogger.error( e );
} finally {
processPipe = null;
}
}
}
private void destroyThread( ProcessOutputThread thread ) {
if( thread == null ) return;
thread.interrupt();
}
/**
* Sends a command to the standard input of the running process.
*
* @param command command to send
* @return self instance
*/
public CommandExecutor sendCommand( String command ) {
if( processPipe == null ) return this;
NLogger.debug( "command to send : {}", command );
try {
processPipe.write( command );
processPipe.write( "\n" );
processPipe.flush();
} catch( IOException e ) {
NLogger.error( e );
}
return this;
}
}
| change wait time parameter type from Long to Integer.
| src/main/java/org/nybatis/core/executor/CommandExecutor.java | change wait time parameter type from Long to Integer. | <ide><path>src/main/java/org/nybatis/core/executor/CommandExecutor.java
<ide> }
<ide>
<ide> /**
<del> * 실행중인지 여부를 확인한다.
<del> *
<del> * @return 실행중인지 여부
<add> * check whether process is executing or not.
<add> *
<add> * @return true if process is executing.
<ide> */
<ide> public boolean isAlive() {
<ide>
<ide> * @param timeout max wait time (milliseconds)
<ide> * @return process termination code ( 0 : success )
<ide> */
<del> public int waitFor( Long timeout ) {
<add> public int waitFor( Integer timeout ) {
<ide>
<ide> if( ! isAlive() ) return 0;
<ide>
<ide>
<ide> }
<ide>
<del> private void waitThread( ProcessOutputThread thread, Long timeOut ) {
<add> private void waitThread( ProcessOutputThread thread, Integer timeOut ) {
<ide>
<ide> if( thread == null || ! thread.isAlive() ) return;
<ide> |
|
Java | agpl-3.0 | 353611f463ed78161808bfc58077cfc43d487fa9 | 0 | duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test | c35442c2-2e5f-11e5-9284-b827eb9e62be | hello.java | c34e91d8-2e5f-11e5-9284-b827eb9e62be | c35442c2-2e5f-11e5-9284-b827eb9e62be | hello.java | c35442c2-2e5f-11e5-9284-b827eb9e62be | <ide><path>ello.java
<del>c34e91d8-2e5f-11e5-9284-b827eb9e62be
<add>c35442c2-2e5f-11e5-9284-b827eb9e62be |
|
Java | apache-2.0 | f56388a37d2b83f2415164d9808623fe62be3a4f | 0 | amith01994/intellij-community,adedayo/intellij-community,da1z/intellij-community,lucafavatella/intellij-community,retomerz/intellij-community,fitermay/intellij-community,gnuhub/intellij-community,caot/intellij-community,xfournet/intellij-community,salguarnieri/intellij-community,gnuhub/intellij-community,lucafavatella/intellij-community,izonder/intellij-community,fengbaicanhe/intellij-community,orekyuu/intellij-community,idea4bsd/idea4bsd,clumsy/intellij-community,ibinti/intellij-community,wreckJ/intellij-community,wreckJ/intellij-community,ahb0327/intellij-community,allotria/intellij-community,kdwink/intellij-community,lucafavatella/intellij-community,fnouama/intellij-community,hurricup/intellij-community,TangHao1987/intellij-community,ibinti/intellij-community,MER-GROUP/intellij-community,vladmm/intellij-community,caot/intellij-community,ryano144/intellij-community,FHannes/intellij-community,amith01994/intellij-community,Distrotech/intellij-community,Lekanich/intellij-community,ibinti/intellij-community,holmes/intellij-community,Distrotech/intellij-community,MER-GROUP/intellij-community,suncycheng/intellij-community,tmpgit/intellij-community,kool79/intellij-community,slisson/intellij-community,izonder/intellij-community,petteyg/intellij-community,orekyuu/intellij-community,ftomassetti/intellij-community,orekyuu/intellij-community,suncycheng/intellij-community,signed/intellij-community,signed/intellij-community,blademainer/intellij-community,jagguli/intellij-community,xfournet/intellij-community,izonder/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,SerCeMan/intellij-community,fnouama/intellij-community,vvv1559/intellij-community,adedayo/intellij-community,ryano144/intellij-community,suncycheng/intellij-community,kool79/intellij-community,youdonghai/intellij-community,nicolargo/intellij-community,nicolargo/intellij-community,MER-GROUP/intellij-community,gnuhub/intellij-community,gnuhub/intellij-community,MichaelNedzelsky/intellij-community,ahb0327/intellij-community,wreckJ/intellij-community,alphafoobar/intellij-community,dslomov/intellij-community,gnuhub/intellij-community,michaelgallacher/intellij-community,lucafavatella/intellij-community,muntasirsyed/intellij-community,nicolargo/intellij-community,petteyg/intellij-community,xfournet/intellij-community,muntasirsyed/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,MichaelNedzelsky/intellij-community,blademainer/intellij-community,asedunov/intellij-community,wreckJ/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,kdwink/intellij-community,retomerz/intellij-community,kdwink/intellij-community,pwoodworth/intellij-community,ibinti/intellij-community,michaelgallacher/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,ol-loginov/intellij-community,samthor/intellij-community,nicolargo/intellij-community,holmes/intellij-community,blademainer/intellij-community,dslomov/intellij-community,MichaelNedzelsky/intellij-community,diorcety/intellij-community,vvv1559/intellij-community,ftomassetti/intellij-community,pwoodworth/intellij-community,samthor/intellij-community,clumsy/intellij-community,SerCeMan/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,supersven/intellij-community,robovm/robovm-studio,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,michaelgallacher/intellij-community,Lekanich/
intellij-community,ahb0327/intellij-community,suncycheng/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,samthor/intellij-community,apixandru/intellij-community,dslomov/intellij-community,TangHao1987/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,caot/intellij-community,nicolargo/intellij-community,da1z/intellij-community,signed/intellij-community,fitermay/intellij-community,blademainer/intellij-community,idea4bsd/idea4bsd,Distrotech/intellij-community,ol-loginov/intellij-community,MichaelNedzelsky/intellij-community,ibinti/intellij-community,nicolargo/intellij-community,ivan-fedorov/intellij-community,xfournet/intellij-community,MER-GROUP/intellij-community,caot/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,salguarnieri/intellij-community,orekyuu/intellij-community,ryano144/intellij-community,vvv1559/intellij-community,hurricup/intellij-community,akosyakov/intellij-community,dslomov/intellij-community,lucafavatella/intellij-community,Distrotech/intellij-community,muntasirsyed/intellij-community,michaelgallacher/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,supersven/intellij-community,youdonghai/intellij-community,adedayo/intellij-community,ftomassetti/intellij-community,semonte/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,fnouama/intellij-community,retomerz/intellij-community,blademainer/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,ryano144/intellij-community,da1z/intellij-community,semonte/intellij-community,petteyg/intellij-community,semonte/intellij-community,retomerz/intellij-community,holmes/intellij-community,semonte/intellij-community,kool79/intellij-community,suncycheng/intellij-community,dslomov/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,FHannes/intellij-community,FHannes/intellij-community,akosyakov/intellij-community,TangHao1987/intellij-community,caot/intellij-community,allotria/intellij-community,suncycheng/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,Lekanich/intellij-community,ryano144/intellij-community,jagguli/intellij-community,ryano144/intellij-community,akosyakov/intellij-community,nicolargo/intellij-community,holmes/intellij-community,holmes/intellij-community,amith01994/intellij-community,diorcety/intellij-community,wreckJ/intellij-community,samthor/intellij-community,ernestp/consulo,TangHao1987/intellij-community,fitermay/intellij-community,slisson/intellij-community,samthor/intellij-community,Lekanich/intellij-community,hurricup/intellij-community,supersven/intellij-community,kdwink/intellij-community,pwoodworth/intellij-community,vladmm/intellij-community,fengbaicanhe/intellij-community,ol-loginov/intellij-community,samthor/intellij-community,supersven/intellij-community,ibinti/intellij-community,allotria/intellij-community,supersven/intellij-community,akosyakov/intellij-community,michaelgallacher/intellij-community,amith01994/intellij-community,wreckJ/intellij-community,robovm/robovm-studio,mglukhikh/intellij-community,caot/intellij-community,allotria/intellij-community,pwoodworth/intellij-community,gnuhub/intellij-community,fitermay/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,ivan-fedorov/intellij-community,robovm/robovm-studio,pwoodworth/intellij-community,ftomassetti/intellij-community,diorcety/intellij-community,signed/intellij-co
mmunity,dslomov/intellij-community,michaelgallacher/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,petteyg/intellij-community,petteyg/intellij-community,orekyuu/intellij-community,Lekanich/intellij-community,consulo/consulo,izonder/intellij-community,TangHao1987/intellij-community,ThiagoGarciaAlves/intellij-community,holmes/intellij-community,amith01994/intellij-community,robovm/robovm-studio,signed/intellij-community,slisson/intellij-community,idea4bsd/idea4bsd,akosyakov/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,blademainer/intellij-community,petteyg/intellij-community,ivan-fedorov/intellij-community,diorcety/intellij-community,consulo/consulo,mglukhikh/intellij-community,da1z/intellij-community,asedunov/intellij-community,fnouama/intellij-community,ol-loginov/intellij-community,holmes/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,clumsy/intellij-community,mglukhikh/intellij-community,consulo/consulo,fitermay/intellij-community,dslomov/intellij-community,ernestp/consulo,apixandru/intellij-community,ryano144/intellij-community,semonte/intellij-community,fnouama/intellij-community,da1z/intellij-community,slisson/intellij-community,alphafoobar/intellij-community,Lekanich/intellij-community,gnuhub/intellij-community,adedayo/intellij-community,retomerz/intellij-community,amith01994/intellij-community,da1z/intellij-community,alphafoobar/intellij-community,adedayo/intellij-community,kool79/intellij-community,ivan-fedorov/intellij-community,alphafoobar/intellij-community,alphafoobar/intellij-community,xfournet/intellij-community,SerCeMan/intellij-community,ivan-fedorov/intellij-community,petteyg/intellij-community,idea4bsd/idea4bsd,wreckJ/intellij-community,semonte/intellij-community,alphafoobar/intellij-community,kdwink/intellij-community,michaelgallacher/intellij-community,pwoodworth/intellij-community,supersven/intellij-community,kdwink/intellij-community,tmpgit/intellij-community,kool79/intellij-community,ftomassetti/intellij-community,vladmm/intellij-community,holmes/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,blademainer/intellij-community,nicolargo/intellij-community,MER-GROUP/intellij-community,xfournet/intellij-community,asedunov/intellij-community,izonder/intellij-community,ftomassetti/intellij-community,fnouama/intellij-community,youdonghai/intellij-community,clumsy/intellij-community,supersven/intellij-community,SerCeMan/intellij-community,SerCeMan/intellij-community,suncycheng/intellij-community,retomerz/intellij-community,tmpgit/intellij-community,ftomassetti/intellij-community,supersven/intellij-community,amith01994/intellij-community,asedunov/intellij-community,clumsy/intellij-community,signed/intellij-community,hurricup/intellij-community,hurricup/intellij-community,robovm/robovm-studio,alphafoobar/intellij-community,ivan-fedorov/intellij-community,blademainer/intellij-community,FHannes/intellij-community,allotria/intellij-community,SerCeMan/intellij-community,ahb0327/intellij-community,MER-GROUP/intellij-community,lucafavatella/intellij-community,fengbaicanhe/intellij-community,nicolargo/intellij-community,ThiagoGarciaAlves/intellij-community,ol-loginov/intellij-community,asedunov/intellij-community,tmpgit/intellij-community,FHannes/intellij-community,ol-loginov/intellij-community,ivan-fedorov/intellij-community,MER-GROUP/intellij-community,fitermay/intellij-community,robovm/robovm-studio,tmpgit/intellij-community,
clumsy/intellij-community,clumsy/intellij-community,ivan-fedorov/intellij-community,tmpgit/intellij-community,ernestp/consulo,MichaelNedzelsky/intellij-community,FHannes/intellij-community,salguarnieri/intellij-community,salguarnieri/intellij-community,muntasirsyed/intellij-community,suncycheng/intellij-community,ivan-fedorov/intellij-community,signed/intellij-community,kdwink/intellij-community,Distrotech/intellij-community,mglukhikh/intellij-community,supersven/intellij-community,SerCeMan/intellij-community,vladmm/intellij-community,salguarnieri/intellij-community,gnuhub/intellij-community,adedayo/intellij-community,fitermay/intellij-community,petteyg/intellij-community,ol-loginov/intellij-community,wreckJ/intellij-community,youdonghai/intellij-community,vladmm/intellij-community,ftomassetti/intellij-community,ahb0327/intellij-community,lucafavatella/intellij-community,slisson/intellij-community,robovm/robovm-studio,retomerz/intellij-community,ernestp/consulo,FHannes/intellij-community,michaelgallacher/intellij-community,lucafavatella/intellij-community,kdwink/intellij-community,ivan-fedorov/intellij-community,MER-GROUP/intellij-community,MER-GROUP/intellij-community,clumsy/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,robovm/robovm-studio,youdonghai/intellij-community,TangHao1987/intellij-community,SerCeMan/intellij-community,kool79/intellij-community,kool79/intellij-community,da1z/intellij-community,dslomov/intellij-community,xfournet/intellij-community,muntasirsyed/intellij-community,diorcety/intellij-community,SerCeMan/intellij-community,Distrotech/intellij-community,fitermay/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,fengbaicanhe/intellij-community,mglukhikh/intellij-community,izonder/intellij-community,gnuhub/intellij-community,pwoodworth/intellij-community,apixandru/intellij-community,MER-GROUP/intellij-community,Lekanich/intellij-community,caot/intellij-community,da1z/intellij-community,lucafavatella/intellij-community,TangHao1987/intellij-community,izonder/intellij-community,ftomassetti/intellij-community,jagguli/intellij-community,apixandru/intellij-community,diorcety/intellij-community,diorcety/intellij-community,vladmm/intellij-community,gnuhub/intellij-community,asedunov/intellij-community,jagguli/intellij-community,apixandru/intellij-community,diorcety/intellij-community,TangHao1987/intellij-community,wreckJ/intellij-community,vladmm/intellij-community,Distrotech/intellij-community,petteyg/intellij-community,amith01994/intellij-community,xfournet/intellij-community,fengbaicanhe/intellij-community,tmpgit/intellij-community,vvv1559/intellij-community,akosyakov/intellij-community,apixandru/intellij-community,orekyuu/intellij-community,robovm/robovm-studio,retomerz/intellij-community,youdonghai/intellij-community,MER-GROUP/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,dslomov/intellij-community,xfournet/intellij-community,semonte/intellij-community,muntasirsyed/intellij-community,samthor/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,allotria/intellij-community,asedunov/intellij-community,consulo/consulo,wreckJ/intellij-community,Distrotech/intellij-community,ibinti/intellij-community,jagguli/intellij-community,muntasirsyed/intellij-community,ahb0327/intellij-community,adedayo/intellij-community,da1z/intellij-community,ibinti/intellij-community,signed/intellij-community,kool79/intellij-community,hurricup/intellij-community,SerCeMan/intelli
j-community,retomerz/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,pwoodworth/intellij-community,kdwink/intellij-community,Lekanich/intellij-community,jagguli/intellij-community,salguarnieri/intellij-community,jagguli/intellij-community,fnouama/intellij-community,apixandru/intellij-community,tmpgit/intellij-community,nicolargo/intellij-community,ahb0327/intellij-community,izonder/intellij-community,xfournet/intellij-community,salguarnieri/intellij-community,holmes/intellij-community,supersven/intellij-community,vvv1559/intellij-community,orekyuu/intellij-community,blademainer/intellij-community,supersven/intellij-community,izonder/intellij-community,asedunov/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,consulo/consulo,samthor/intellij-community,lucafavatella/intellij-community,dslomov/intellij-community,blademainer/intellij-community,semonte/intellij-community,kool79/intellij-community,fengbaicanhe/intellij-community,vvv1559/intellij-community,TangHao1987/intellij-community,blademainer/intellij-community,petteyg/intellij-community,holmes/intellij-community,slisson/intellij-community,hurricup/intellij-community,fitermay/intellij-community,signed/intellij-community,fnouama/intellij-community,dslomov/intellij-community,jagguli/intellij-community,apixandru/intellij-community,hurricup/intellij-community,ryano144/intellij-community,allotria/intellij-community,akosyakov/intellij-community,muntasirsyed/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,retomerz/intellij-community,ThiagoGarciaAlves/intellij-community,caot/intellij-community,michaelgallacher/intellij-community,holmes/intellij-community,fnouama/intellij-community,ol-loginov/intellij-community,michaelgallacher/intellij-community,jagguli/intellij-community,adedayo/intellij-community,caot/intellij-community,Distrotech/intellij-community,gnuhub/intellij-community,ryano144/intellij-community,Lekanich/intellij-community,ftomassetti/intellij-community,MER-GROUP/intellij-community,idea4bsd/idea4bsd,tmpgit/intellij-community,ahb0327/intellij-community,blademainer/intellij-community,amith01994/intellij-community,wreckJ/intellij-community,hurricup/intellij-community,semonte/intellij-community,MichaelNedzelsky/intellij-community,amith01994/intellij-community,kdwink/intellij-community,supersven/intellij-community,da1z/intellij-community,kool79/intellij-community,lucafavatella/intellij-community,akosyakov/intellij-community,holmes/intellij-community,clumsy/intellij-community,vladmm/intellij-community,MichaelNedzelsky/intellij-community,pwoodworth/intellij-community,ernestp/consulo,akosyakov/intellij-community,idea4bsd/idea4bsd,fitermay/intellij-community,ivan-fedorov/intellij-community,alphafoobar/intellij-community,slisson/intellij-community,orekyuu/intellij-community,fitermay/intellij-community,petteyg/intellij-community,kool79/intellij-community,signed/intellij-community,jagguli/intellij-community,ol-loginov/intellij-community,ThiagoGarciaAlves/intellij-community,adedayo/intellij-community,nicolargo/intellij-community,izonder/intellij-community,alphafoobar/intellij-community,samthor/intellij-community,slisson/intellij-community,ThiagoGarciaAlves/intellij-community,amith01994/intellij-community,consulo/consulo,Lekanich/intellij-community,akosyakov/intellij-community,ryano144/intellij-community,idea4bsd/idea4bsd,MichaelNedzelsky/intellij-community,samthor/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,suncycheng/intellij-community,ol-loginov/intellij-com
munity,SerCeMan/intellij-community,tmpgit/intellij-community,apixandru/intellij-community,slisson/intellij-community,semonte/intellij-community,ibinti/intellij-community,da1z/intellij-community,muntasirsyed/intellij-community,akosyakov/intellij-community,vladmm/intellij-community,salguarnieri/intellij-community,signed/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,fengbaicanhe/intellij-community,SerCeMan/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,lucafavatella/intellij-community,kdwink/intellij-community,fengbaicanhe/intellij-community,hurricup/intellij-community,TangHao1987/intellij-community,samthor/intellij-community,kool79/intellij-community,ol-loginov/intellij-community,tmpgit/intellij-community,fnouama/intellij-community,slisson/intellij-community,Distrotech/intellij-community,muntasirsyed/intellij-community,wreckJ/intellij-community,ryano144/intellij-community,da1z/intellij-community,izonder/intellij-community,fengbaicanhe/intellij-community,retomerz/intellij-community,salguarnieri/intellij-community,semonte/intellij-community,da1z/intellij-community,adedayo/intellij-community,allotria/intellij-community,ftomassetti/intellij-community,MichaelNedzelsky/intellij-community,salguarnieri/intellij-community,mglukhikh/intellij-community,orekyuu/intellij-community,jagguli/intellij-community,fengbaicanhe/intellij-community,samthor/intellij-community,michaelgallacher/intellij-community,Lekanich/intellij-community,vladmm/intellij-community,allotria/intellij-community,ahb0327/intellij-community,orekyuu/intellij-community,apixandru/intellij-community,allotria/intellij-community,ahb0327/intellij-community,caot/intellij-community,hurricup/intellij-community,Distrotech/intellij-community,retomerz/intellij-community,ftomassetti/intellij-community,ryano144/intellij-community,MichaelNedzelsky/intellij-community,Lekanich/intellij-community,muntasirsyed/intellij-community,diorcety/intellij-community,fengbaicanhe/intellij-community,adedayo/intellij-community,ernestp/consulo,robovm/robovm-studio,allotria/intellij-community,dslomov/intellij-community,MichaelNedzelsky/intellij-community,slisson/intellij-community,FHannes/intellij-community,pwoodworth/intellij-community,retomerz/intellij-community,asedunov/intellij-community,idea4bsd/idea4bsd,orekyuu/intellij-community,muntasirsyed/intellij-community,clumsy/intellij-community,pwoodworth/intellij-community,pwoodworth/intellij-community,xfournet/intellij-community,hurricup/intellij-community,youdonghai/intellij-community,fnouama/intellij-community,robovm/robovm-studio,jagguli/intellij-community,orekyuu/intellij-community,diorcety/intellij-community,tmpgit/intellij-community,ol-loginov/intellij-community,akosyakov/intellij-community,amith01994/intellij-community,slisson/intellij-community,caot/intellij-community,ThiagoGarciaAlves/intellij-community,clumsy/intellij-community,robovm/robovm-studio,ivan-fedorov/intellij-community,Distrotech/intellij-community,diorcety/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,caot/intellij-community,allotria/intellij-community,signed/intellij-community,clumsy/intellij-community,adedayo/intellij-community,FHannes/intellij-community,MichaelNedzelsky/intellij-community,signed/intellij-community,vladmm/intellij-community,petteyg/intellij-community,fnouama/intellij-community,fengbaicanhe/intellij-community,apixandru/intellij-community,lucafa
vatella/intellij-community,izonder/intellij-community,ahb0327/intellij-community,ahb0327/intellij-community,vladmm/intellij-community | /*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.JDOMUtil;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.io.StreamUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiFormatUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.ConcurrencyUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.StringBuilderSpinAllocator;
import com.intellij.util.containers.ConcurrentWeakHashMap;
import com.intellij.util.containers.ConcurrentWeakValueHashMap;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import gnu.trove.THashMap;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.ConcurrentMap;
public abstract class BaseExternalAnnotationsManager extends ExternalAnnotationsManager {
private static final Logger LOG = Logger.getInstance("#" + BaseExternalAnnotationsManager.class.getName());
@NotNull private static final List<PsiFile> NULL = new ArrayList<PsiFile>();
@NotNull protected final ConcurrentMap<String, List<PsiFile>> myExternalAnnotations = new ConcurrentWeakValueHashMap<String, List<PsiFile>>();
protected final PsiManager myPsiManager;
public BaseExternalAnnotationsManager(final PsiManager psiManager) {
myPsiManager = psiManager;
}
@Nullable
protected static String getExternalName(PsiModifierListOwner listOwner, boolean showParamName) {
return PsiFormatUtil.getExternalName(listOwner, showParamName, Integer.MAX_VALUE);
}
@Nullable
protected static String getFQN(@NotNull String packageName, @NotNull PsiFile psiFile) {
VirtualFile virtualFile = psiFile.getVirtualFile();
if (virtualFile == null) return null;
return StringUtil.getQualifiedName(packageName, virtualFile.getNameWithoutExtension());
}
@Nullable
protected static String getNormalizedExternalName(@NotNull PsiModifierListOwner owner) {
String externalName = getExternalName(owner, true);
if (externalName != null) {
if (owner instanceof PsiParameter && owner.getParent() instanceof PsiParameterList) {
final PsiMethod method = PsiTreeUtil.getParentOfType(owner, PsiMethod.class);
if (method != null) {
externalName =
externalName.substring(0, externalName.lastIndexOf(' ') + 1) + method.getParameterList().getParameterIndex((PsiParameter)owner);
}
}
final int idx = externalName.indexOf('(');
if (idx == -1) return externalName;
final StringBuilder buf = StringBuilderSpinAllocator.alloc();
try {
final int rightIdx = externalName.indexOf(')');
final String[] params = externalName.substring(idx + 1, rightIdx).split(",");
buf.append(externalName.substring(0, idx + 1));
for (String param : params) {
param = param.trim();
final int spaceIdx = param.indexOf(' ');
buf.append(spaceIdx > -1 ? param.substring(0, spaceIdx) : param).append(", ");
}
return StringUtil.trimEnd(buf.toString(), ", ") + externalName.substring(rightIdx);
}
finally {
StringBuilderSpinAllocator.dispose(buf);
}
}
return externalName;
}
protected abstract boolean hasAnyAnnotationsRoots();
@Override
@Nullable
public PsiAnnotation findExternalAnnotation(@NotNull final PsiModifierListOwner listOwner, @NotNull final String annotationFQN) {
return collectExternalAnnotations(listOwner).get(annotationFQN);
}
@Override
public boolean isExternalAnnotationWritable(@NotNull PsiModifierListOwner listOwner, @NotNull String annotationFQN) {
// note that this method doesn't cache it's result
Map<String, PsiAnnotation> map = doCollect(listOwner, true);
return map.containsKey(annotationFQN);
}
@Override
@Nullable
public PsiAnnotation[] findExternalAnnotations(@NotNull final PsiModifierListOwner listOwner) {
final Map<String, PsiAnnotation> result = collectExternalAnnotations(listOwner);
return result.isEmpty() ? null : result.values().toArray(new PsiAnnotation[result.size()]);
}
private final ConcurrentMap<PsiModifierListOwner, Map<String, PsiAnnotation>> cache = new ConcurrentWeakHashMap<PsiModifierListOwner, Map<String, PsiAnnotation>>();
@NotNull
private Map<String, PsiAnnotation> collectExternalAnnotations(@NotNull final PsiModifierListOwner listOwner) {
if (!hasAnyAnnotationsRoots()) return Collections.emptyMap();
Map<String, PsiAnnotation> map = cache.get(listOwner);
if (map == null) {
map = doCollect(listOwner, false);
map = ConcurrencyUtil.cacheOrGet(cache, listOwner, map);
}
return map;
}
private static final MultiMap<String, AnnotationData> EMPTY = new MultiMap<String, AnnotationData>();
private ConcurrentMap<PsiFile, Pair<MultiMap<String, AnnotationData>, Long>> annotationsFileToDataAndModificationStamp = new ConcurrentWeakHashMap<PsiFile, Pair<MultiMap<String, AnnotationData>, Long>>();
@NotNull
private MultiMap<String, AnnotationData> getDataFromFile(@NotNull PsiFile file) {
Pair<MultiMap<String, AnnotationData>, Long> cached = annotationsFileToDataAndModificationStamp.get(file);
if (cached != null && cached.getSecond() == file.getModificationStamp()) {
return cached.getFirst();
}
else {
Document document;
try {
VirtualFile virtualFile = file.getVirtualFile();
if (virtualFile == null) return EMPTY;
document = JDOMUtil.loadDocument(escapeAttributes(StreamUtil.readText(virtualFile.getInputStream())));
}
catch (IOException e) {
LOG.error(e);
return EMPTY;
}
catch (JDOMException e) {
LOG.error(e);
return EMPTY;
}
Element rootElement = document.getRootElement();
if (rootElement == null) return EMPTY;
MultiMap<String, AnnotationData> data = new MultiMap<String, AnnotationData>();
//noinspection unchecked
for (Element element : (List<Element>) rootElement.getChildren()) {
String ownerName = element.getAttributeValue("name");
if (ownerName == null) continue;
//noinspection unchecked
for (Element annotationElement : (List<Element>) element.getChildren()) {
String annotationFQN = annotationElement.getAttributeValue("name");
if (StringUtil.isEmpty(annotationFQN)) continue;
StringBuilder buf = new StringBuilder();
//noinspection unchecked
for (Element annotationParameter : (List<Element>) annotationElement.getChildren()) {
buf.append(",");
String nameValue = annotationParameter.getAttributeValue("name");
if (nameValue != null) {
buf.append(nameValue).append("=");
}
buf.append(annotationParameter.getAttributeValue("val"));
}
String annotationText = "@" + annotationFQN + (buf.length() > 0 ? "(" + StringUtil.trimStart(buf.toString(), ",") + ")" : "");
data.putValue(ownerName, new AnnotationData(annotationFQN, annotationText));
}
}
Pair<MultiMap<String, AnnotationData>, Long> pair = Pair.create(data, file.getModificationStamp());
pair = ConcurrencyUtil.cacheOrGet(annotationsFileToDataAndModificationStamp, file, pair);
data = pair.first;
return data;
}
}
@NotNull
private Map<String, PsiAnnotation> doCollect(@NotNull PsiModifierListOwner listOwner, boolean onlyWritable) {
final List<PsiFile> files = findExternalAnnotationsFiles(listOwner);
if (files == null) {
return Collections.emptyMap();
}
Map<String, PsiAnnotation> result = new THashMap<String, PsiAnnotation>();
String externalName = getExternalName(listOwner, false);
String oldExternalName = getNormalizedExternalName(listOwner);
for (PsiFile file : files) {
if (!file.isValid()) continue;
if (onlyWritable && !file.isWritable()) continue;
MultiMap<String, AnnotationData> fileData = getDataFromFile(file);
for (AnnotationData annotationData : ContainerUtil.concat(fileData.get(externalName), fileData.get(oldExternalName))) {
// don't add annotation, if there already is one with this FQ name
if (result.containsKey(annotationData.annotationClassFqName)) continue;
try {
PsiElementFactory factory = JavaPsiFacade.getInstance(myPsiManager.getProject()).getElementFactory();
PsiAnnotation annotation = factory.createAnnotationFromText(annotationData.annotationText, null);
result.put(annotationData.annotationClassFqName, annotation);
}
catch (IncorrectOperationException e) {
LOG.error(e);
}
}
}
return result;
}
@NotNull
protected abstract List<VirtualFile> getExternalAnnotationsRoots(@NotNull VirtualFile libraryFile);
@Override
@Nullable
public List<PsiFile> findExternalAnnotationsFiles(@NotNull PsiModifierListOwner listOwner) {
final PsiFile containingFile = listOwner.getContainingFile();
if (!(containingFile instanceof PsiJavaFile)) {
return null;
}
final PsiJavaFile javaFile = (PsiJavaFile)containingFile;
final String packageName = javaFile.getPackageName();
final VirtualFile virtualFile = containingFile.getVirtualFile();
String fqn = getFQN(packageName, containingFile);
if (fqn == null) return null;
final List<PsiFile> files = myExternalAnnotations.get(fqn);
if (files == NULL) return null;
if (files != null) {
boolean allValid = true;
for (PsiFile file : files) {
allValid &= file.isValid();
}
if (allValid) {
return files;
}
}
if (virtualFile == null) {
return null;
}
ArrayList<PsiFile> possibleAnnotationsXmls = new ArrayList<PsiFile>();
for (VirtualFile root : getExternalAnnotationsRoots(virtualFile)) {
final VirtualFile ext = root.findFileByRelativePath(packageName.replace(".", "/") + "/" + ANNOTATIONS_XML);
if (ext == null) continue;
final PsiFile psiFile = myPsiManager.findFile(ext);
possibleAnnotationsXmls.add(psiFile);
}
if (!possibleAnnotationsXmls.isEmpty()) {
possibleAnnotationsXmls.trimToSize();
// sorting by writability: writable go first
Collections.sort(possibleAnnotationsXmls, new Comparator<PsiFile>() {
@Override
public int compare(PsiFile f1, PsiFile f2) {
boolean w1 = f1.isWritable();
boolean w2 = f2.isWritable();
if (w1 == w2) {
return 0;
}
return w1 ? -1 : 1;
}
});
myExternalAnnotations.put(fqn, possibleAnnotationsXmls);
return possibleAnnotationsXmls;
}
myExternalAnnotations.put(fqn, NULL);
return null;
}
protected void dropCache() {
myExternalAnnotations.clear();
annotationsFileToDataAndModificationStamp.clear();
cache.clear();
}
// This method is used for legacy reasons.
// Old external annotations sometimes are bad XML: they have "<" and ">" characters in attributes values. To prevent SAX parser from
// failing, we escape attributes values.
@NotNull
private static String escapeAttributes(@NotNull String invalidXml) {
// We assume that XML has single- and double-quote characters only for attribute values, therefore we don't any complex parsing,
// just have binary inAttribute state
StringBuilder buf = new StringBuilder();
boolean inAttribute = false;
for (int i = 0; i < invalidXml.length(); i++) {
char c = invalidXml.charAt(i);
if (inAttribute && c == '<') {
buf.append("<");
}
else if (inAttribute && c == '>') {
buf.append(">");
}
else if (c == '\"' || c == '\'') {
buf.append('\"');
inAttribute = !inAttribute;
}
else {
buf.append(c);
}
}
return buf.toString();
}
private static class AnnotationData {
@NotNull public String annotationClassFqName;
@NotNull public String annotationText;
private AnnotationData(@NotNull String annotationClassFqName, @NotNull String annotationText) {
this.annotationClassFqName = annotationClassFqName;
this.annotationText = annotationText;
}
}
}
| java/java-psi-api/src/com/intellij/codeInsight/BaseExternalAnnotationsManager.java | /*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.JDOMUtil;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.io.StreamUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiFormatUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.StringBuilderSpinAllocator;
import com.intellij.util.containers.ConcurrentWeakHashMap;
import com.intellij.util.containers.ConcurrentWeakValueHashMap;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.ConcurrentMap;
public abstract class BaseExternalAnnotationsManager extends ExternalAnnotationsManager {
private static final Logger LOG = Logger.getInstance("#" + BaseExternalAnnotationsManager.class.getName());
@NotNull private static final List<PsiFile> NULL = new ArrayList<PsiFile>();
@NotNull protected final ConcurrentMap<String, List<PsiFile>> myExternalAnnotations = new ConcurrentWeakValueHashMap<String, List<PsiFile>>();
protected final PsiManager myPsiManager;
public BaseExternalAnnotationsManager(final PsiManager psiManager) {
myPsiManager = psiManager;
}
@Nullable
protected static String getExternalName(PsiModifierListOwner listOwner, boolean showParamName) {
return PsiFormatUtil.getExternalName(listOwner, showParamName, Integer.MAX_VALUE);
}
@Nullable
protected static String getFQN(@NotNull String packageName, @NotNull PsiFile psiFile) {
VirtualFile virtualFile = psiFile.getVirtualFile();
if (virtualFile == null) return null;
return StringUtil.getQualifiedName(packageName, virtualFile.getNameWithoutExtension());
}
@Nullable
protected static String getNormalizedExternalName(@NotNull PsiModifierListOwner owner) {
String externalName = getExternalName(owner, true);
if (externalName != null) {
if (owner instanceof PsiParameter && owner.getParent() instanceof PsiParameterList) {
final PsiMethod method = PsiTreeUtil.getParentOfType(owner, PsiMethod.class);
if (method != null) {
externalName =
externalName.substring(0, externalName.lastIndexOf(' ') + 1) + method.getParameterList().getParameterIndex((PsiParameter)owner);
}
}
final int idx = externalName.indexOf('(');
if (idx == -1) return externalName;
final StringBuilder buf = StringBuilderSpinAllocator.alloc();
try {
final int rightIdx = externalName.indexOf(')');
final String[] params = externalName.substring(idx + 1, rightIdx).split(",");
buf.append(externalName.substring(0, idx + 1));
for (String param : params) {
param = param.trim();
final int spaceIdx = param.indexOf(' ');
buf.append(spaceIdx > -1 ? param.substring(0, spaceIdx) : param).append(", ");
}
return StringUtil.trimEnd(buf.toString(), ", ") + externalName.substring(rightIdx);
}
finally {
StringBuilderSpinAllocator.dispose(buf);
}
}
return externalName;
}
protected abstract boolean hasAnyAnnotationsRoots();
@Override
@Nullable
public PsiAnnotation findExternalAnnotation(@NotNull final PsiModifierListOwner listOwner, @NotNull final String annotationFQN) {
return collectExternalAnnotations(listOwner).get(annotationFQN);
}
@Override
public boolean isExternalAnnotationWritable(@NotNull PsiModifierListOwner listOwner, @NotNull String annotationFQN) {
// note that this method doesn't cache it's result
Map<String, PsiAnnotation> map = doCollect(listOwner, true);
return map.containsKey(annotationFQN);
}
@Override
@Nullable
public PsiAnnotation[] findExternalAnnotations(@NotNull final PsiModifierListOwner listOwner) {
final Map<String, PsiAnnotation> result = collectExternalAnnotations(listOwner);
return result.isEmpty() ? null : result.values().toArray(new PsiAnnotation[result.size()]);
}
private final Map<PsiModifierListOwner, Map<String, PsiAnnotation>> cache = new ConcurrentWeakHashMap<PsiModifierListOwner, Map<String, PsiAnnotation>>();
@NotNull
private Map<String, PsiAnnotation> collectExternalAnnotations(@NotNull final PsiModifierListOwner listOwner) {
if (!hasAnyAnnotationsRoots()) return Collections.emptyMap();
Map<String, PsiAnnotation> map = cache.get(listOwner);
if (map == null) {
map = doCollect(listOwner, false);
cache.put(listOwner, map);
}
return map;
}
protected ConcurrentMap<PsiFile, Pair<MultiMap<String, AnnotationData>, Long>> annotationsFileToDataAndModificationStamp
= new ConcurrentWeakHashMap<PsiFile, Pair<MultiMap<String, AnnotationData>, Long>>();
@NotNull
private MultiMap<String, AnnotationData> getDataFromFile(@NotNull PsiFile file) {
Pair<MultiMap<String, AnnotationData>, Long> cached = annotationsFileToDataAndModificationStamp.get(file);
if (cached != null && cached.getSecond() == file.getModificationStamp()) {
return cached.getFirst();
}
else {
MultiMap<String, AnnotationData> data = new MultiMap<String, AnnotationData>();
annotationsFileToDataAndModificationStamp.put(file, Pair.create(data, file.getModificationStamp()));
Document document;
try {
VirtualFile virtualFile = file.getVirtualFile();
if (virtualFile == null) return data;
document = JDOMUtil.loadDocument(escapeAttributes(StreamUtil.readText(virtualFile.getInputStream())));
}
catch (IOException e) {
LOG.error(e);
return data;
}
catch (JDOMException e) {
LOG.error(e);
return data;
}
Element rootElement = document.getRootElement();
if (rootElement == null) return data;
//noinspection unchecked
for (Element element : (List<Element>) rootElement.getChildren()) {
String ownerName = element.getAttributeValue("name");
if (ownerName == null) continue;
//noinspection unchecked
for (Element annotationElement : (List<Element>) element.getChildren()) {
String annotationFQN = annotationElement.getAttributeValue("name");
StringBuilder buf = new StringBuilder();
//noinspection unchecked
for (Element annotationParameter : (List<Element>) annotationElement.getChildren()) {
buf.append(",");
String nameValue = annotationParameter.getAttributeValue("name");
if (nameValue != null) {
buf.append(nameValue).append("=");
}
buf.append(annotationParameter.getAttributeValue("val"));
}
String annotationText = "@" + annotationFQN + (buf.length() > 0 ? "(" + StringUtil.trimStart(buf.toString(), ",") + ")" : "");
data.putValue(ownerName, new AnnotationData(annotationFQN, annotationText));
}
}
return data;
}
}
private Map<String, PsiAnnotation> doCollect(@NotNull PsiModifierListOwner listOwner, boolean onlyWritable) {
final List<PsiFile> files = findExternalAnnotationsFiles(listOwner);
if (files == null) {
return Collections.emptyMap();
}
Map<String, PsiAnnotation> result = new HashMap<String, PsiAnnotation>();
String externalName = getExternalName(listOwner, false);
String oldExternalName = getNormalizedExternalName(listOwner);
for (PsiFile file : files) {
if (!file.isValid()) continue;
if (onlyWritable && !file.isWritable()) continue;
MultiMap<String, AnnotationData> fileData = getDataFromFile(file);
for (AnnotationData annotationData : ContainerUtil.concat(fileData.get(externalName), fileData.get(oldExternalName))) {
// don't add annotation, if there already is one with this FQ name
if (result.containsKey(annotationData.annotationClassFqName)) continue;
try {
result.put(annotationData.annotationClassFqName,
JavaPsiFacade.getInstance(myPsiManager.getProject()).getElementFactory().createAnnotationFromText(
annotationData.annotationText, null));
}
catch (IncorrectOperationException e) {
LOG.error(e);
}
}
}
return result;
}
@NotNull
protected abstract List<VirtualFile> getExternalAnnotationsRoots(@NotNull VirtualFile libraryFile);
@Override
@Nullable
public List<PsiFile> findExternalAnnotationsFiles(@NotNull PsiModifierListOwner listOwner) {
final PsiFile containingFile = listOwner.getContainingFile();
if (!(containingFile instanceof PsiJavaFile)) {
return null;
}
final PsiJavaFile javaFile = (PsiJavaFile)containingFile;
final String packageName = javaFile.getPackageName();
final VirtualFile virtualFile = containingFile.getVirtualFile();
String fqn = getFQN(packageName, containingFile);
if (fqn == null) return null;
final List<PsiFile> files = myExternalAnnotations.get(fqn);
if (files == NULL) return null;
if (files != null) {
boolean allValid = true;
for (PsiFile file : files) {
allValid &= file.isValid();
}
if (allValid) {
return files;
}
}
if (virtualFile == null) {
return null;
}
ArrayList<PsiFile> possibleAnnotationsXmls = new ArrayList<PsiFile>();
for (VirtualFile root : getExternalAnnotationsRoots(virtualFile)) {
final VirtualFile ext = root.findFileByRelativePath(packageName.replace(".", "/") + "/" + ANNOTATIONS_XML);
if (ext == null) continue;
final PsiFile psiFile = myPsiManager.findFile(ext);
possibleAnnotationsXmls.add(psiFile);
}
if (!possibleAnnotationsXmls.isEmpty()) {
possibleAnnotationsXmls.trimToSize();
// sorting by writability: writable go first
Collections.sort(possibleAnnotationsXmls, new Comparator<PsiFile>() {
@Override
public int compare(PsiFile f1, PsiFile f2) {
boolean w1 = f1.isWritable();
boolean w2 = f2.isWritable();
if (w1 == w2) {
return 0;
}
else {
return w1 ? -1 : 1;
}
}
});
myExternalAnnotations.put(fqn, possibleAnnotationsXmls);
return possibleAnnotationsXmls;
}
myExternalAnnotations.put(fqn, NULL);
return null;
}
protected void dropCache() {
myExternalAnnotations.clear();
annotationsFileToDataAndModificationStamp.clear();
cache.clear();
}
// This method is used for legacy reasons.
// Old external annotations sometimes are bad XML: they have "<" and ">" characters in attributes values. To prevent SAX parser from
// failing, we escape attributes values.
@NotNull
private static String escapeAttributes(@NotNull String invalidXml) {
// We assume that XML has single- and double-quote characters only for attribute values, therefore we don't any complex parsing,
// just have binary inAttribute state
StringBuilder buf = new StringBuilder();
boolean inAttribute = false;
for (int i = 0; i < invalidXml.length(); i++) {
char c = invalidXml.charAt(i);
if (inAttribute && c == '<') {
buf.append("<");
} else if (inAttribute && c == '>') {
buf.append(">");
} else if (c == '\"' || c == '\'') {
buf.append('\"');
inAttribute = !inAttribute;
} else {
buf.append(c);
}
}
return buf.toString();
}
private static class AnnotationData {
public String annotationClassFqName;
public String annotationText;
private AnnotationData(String annotationClassFqName, String annotationText) {
this.annotationClassFqName = annotationClassFqName;
this.annotationText = annotationText;
}
}
}
| data races
| java/java-psi-api/src/com/intellij/codeInsight/BaseExternalAnnotationsManager.java | data races | <ide><path>ava/java-psi-api/src/com/intellij/codeInsight/BaseExternalAnnotationsManager.java
<ide> import com.intellij.psi.*;
<ide> import com.intellij.psi.util.PsiFormatUtil;
<ide> import com.intellij.psi.util.PsiTreeUtil;
<add>import com.intellij.util.ConcurrencyUtil;
<ide> import com.intellij.util.IncorrectOperationException;
<ide> import com.intellij.util.StringBuilderSpinAllocator;
<ide> import com.intellij.util.containers.ConcurrentWeakHashMap;
<ide> import com.intellij.util.containers.ConcurrentWeakValueHashMap;
<ide> import com.intellij.util.containers.ContainerUtil;
<ide> import com.intellij.util.containers.MultiMap;
<add>import gnu.trove.THashMap;
<ide> import org.jdom.Document;
<ide> import org.jdom.Element;
<ide> import org.jdom.JDOMException;
<ide> return result.isEmpty() ? null : result.values().toArray(new PsiAnnotation[result.size()]);
<ide> }
<ide>
<del> private final Map<PsiModifierListOwner, Map<String, PsiAnnotation>> cache = new ConcurrentWeakHashMap<PsiModifierListOwner, Map<String, PsiAnnotation>>();
<add> private final ConcurrentMap<PsiModifierListOwner, Map<String, PsiAnnotation>> cache = new ConcurrentWeakHashMap<PsiModifierListOwner, Map<String, PsiAnnotation>>();
<ide> @NotNull
<ide> private Map<String, PsiAnnotation> collectExternalAnnotations(@NotNull final PsiModifierListOwner listOwner) {
<ide> if (!hasAnyAnnotationsRoots()) return Collections.emptyMap();
<ide> Map<String, PsiAnnotation> map = cache.get(listOwner);
<ide> if (map == null) {
<ide> map = doCollect(listOwner, false);
<del> cache.put(listOwner, map);
<add> map = ConcurrencyUtil.cacheOrGet(cache, listOwner, map);
<ide> }
<ide> return map;
<ide> }
<ide>
<del> protected ConcurrentMap<PsiFile, Pair<MultiMap<String, AnnotationData>, Long>> annotationsFileToDataAndModificationStamp
<del> = new ConcurrentWeakHashMap<PsiFile, Pair<MultiMap<String, AnnotationData>, Long>>();
<add> private static final MultiMap<String, AnnotationData> EMPTY = new MultiMap<String, AnnotationData>();
<add> private ConcurrentMap<PsiFile, Pair<MultiMap<String, AnnotationData>, Long>> annotationsFileToDataAndModificationStamp = new ConcurrentWeakHashMap<PsiFile, Pair<MultiMap<String, AnnotationData>, Long>>();
<ide> @NotNull
<ide> private MultiMap<String, AnnotationData> getDataFromFile(@NotNull PsiFile file) {
<ide> Pair<MultiMap<String, AnnotationData>, Long> cached = annotationsFileToDataAndModificationStamp.get(file);
<ide> return cached.getFirst();
<ide> }
<ide> else {
<del> MultiMap<String, AnnotationData> data = new MultiMap<String, AnnotationData>();
<del> annotationsFileToDataAndModificationStamp.put(file, Pair.create(data, file.getModificationStamp()));
<del>
<ide> Document document;
<ide> try {
<ide> VirtualFile virtualFile = file.getVirtualFile();
<del> if (virtualFile == null) return data;
<add> if (virtualFile == null) return EMPTY;
<ide> document = JDOMUtil.loadDocument(escapeAttributes(StreamUtil.readText(virtualFile.getInputStream())));
<ide> }
<ide> catch (IOException e) {
<ide> LOG.error(e);
<del> return data;
<add> return EMPTY;
<ide> }
<ide> catch (JDOMException e) {
<ide> LOG.error(e);
<del> return data;
<add> return EMPTY;
<ide> }
<ide> Element rootElement = document.getRootElement();
<del> if (rootElement == null) return data;
<add> if (rootElement == null) return EMPTY;
<add>
<add> MultiMap<String, AnnotationData> data = new MultiMap<String, AnnotationData>();
<ide>
<ide> //noinspection unchecked
<ide> for (Element element : (List<Element>) rootElement.getChildren()) {
<ide> //noinspection unchecked
<ide> for (Element annotationElement : (List<Element>) element.getChildren()) {
<ide> String annotationFQN = annotationElement.getAttributeValue("name");
<add> if (StringUtil.isEmpty(annotationFQN)) continue;
<ide> StringBuilder buf = new StringBuilder();
<ide> //noinspection unchecked
<ide> for (Element annotationParameter : (List<Element>) annotationElement.getChildren()) {
<ide> }
<ide> }
<ide>
<add> Pair<MultiMap<String, AnnotationData>, Long> pair = Pair.create(data, file.getModificationStamp());
<add> pair = ConcurrencyUtil.cacheOrGet(annotationsFileToDataAndModificationStamp, file, pair);
<add> data = pair.first;
<add>
<ide> return data;
<ide> }
<ide> }
<ide>
<add> @NotNull
<ide> private Map<String, PsiAnnotation> doCollect(@NotNull PsiModifierListOwner listOwner, boolean onlyWritable) {
<ide> final List<PsiFile> files = findExternalAnnotationsFiles(listOwner);
<ide> if (files == null) {
<ide> return Collections.emptyMap();
<ide> }
<del> Map<String, PsiAnnotation> result = new HashMap<String, PsiAnnotation>();
<add> Map<String, PsiAnnotation> result = new THashMap<String, PsiAnnotation>();
<ide> String externalName = getExternalName(listOwner, false);
<ide> String oldExternalName = getNormalizedExternalName(listOwner);
<ide>
<ide> if (result.containsKey(annotationData.annotationClassFqName)) continue;
<ide>
<ide> try {
<del> result.put(annotationData.annotationClassFqName,
<del> JavaPsiFacade.getInstance(myPsiManager.getProject()).getElementFactory().createAnnotationFromText(
<del> annotationData.annotationText, null));
<add> PsiElementFactory factory = JavaPsiFacade.getInstance(myPsiManager.getProject()).getElementFactory();
<add> PsiAnnotation annotation = factory.createAnnotationFromText(annotationData.annotationText, null);
<add> result.put(annotationData.annotationClassFqName, annotation);
<ide> }
<ide> catch (IncorrectOperationException e) {
<ide> LOG.error(e);
<ide> if (w1 == w2) {
<ide> return 0;
<ide> }
<del> else {
<del> return w1 ? -1 : 1;
<del> }
<add> return w1 ? -1 : 1;
<ide> }
<ide> });
<ide>
<ide> char c = invalidXml.charAt(i);
<ide> if (inAttribute && c == '<') {
<ide> buf.append("<");
<del> } else if (inAttribute && c == '>') {
<add> }
<add> else if (inAttribute && c == '>') {
<ide> buf.append(">");
<del> } else if (c == '\"' || c == '\'') {
<add> }
<add> else if (c == '\"' || c == '\'') {
<ide> buf.append('\"');
<ide> inAttribute = !inAttribute;
<del> } else {
<add> }
<add> else {
<ide> buf.append(c);
<ide> }
<ide> }
<ide> }
<ide>
<ide> private static class AnnotationData {
<del> public String annotationClassFqName;
<del> public String annotationText;
<del>
<del> private AnnotationData(String annotationClassFqName, String annotationText) {
<add> @NotNull public String annotationClassFqName;
<add> @NotNull public String annotationText;
<add>
<add> private AnnotationData(@NotNull String annotationClassFqName, @NotNull String annotationText) {
<ide> this.annotationClassFqName = annotationClassFqName;
<ide> this.annotationText = annotationText;
<ide> } |
|
JavaScript | mit | d221e38f7226a869524eba8273e8f77ed6238019 | 0 | mmalecki/node-rubish | Number.prototype.times = function (iterator) {
for (var i = 0; i < this; i++) {
iterator(i);
}
};
Number.prototype.upto = function (up, iterator) {
for (var i = this.valueOf(); i <= up; i++) {
iterator(i);
}
return this.valueOf();
};
| lib/rubish/number.js | Number.prototype.times = function (iterator) {
for (var i = 0; i < this; i++) {
iterator(i);
}
};
| [rubish] `Number.upto` function
| lib/rubish/number.js | [rubish] `Number.upto` function | <ide><path>ib/rubish/number.js
<ide> }
<ide> };
<ide>
<add>Number.prototype.upto = function (up, iterator) {
<add> for (var i = this.valueOf(); i <= up; i++) {
<add> iterator(i);
<add> }
<add> return this.valueOf();
<add>};
<add> |
|
Java | apache-2.0 | 59cd54bee8023d35afafc87bea973828d8012e5f | 0 | da1z/intellij-community,allotria/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,da1z/intellij-community,allotria/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,xfournet/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,xfournet/intellij-community,allotria/intellij-community,xfournet/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,da1z/intellij-community,da1z/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.debugger.streams.resolve;
import com.intellij.debugger.streams.trace.smart.TraceElement;
import com.intellij.debugger.streams.trace.smart.resolve.TraceInfo;
import org.jetbrains.annotations.NotNull;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* @author Vitaliy.Bibaev
*/
public class ResolverFactoryImpl implements ResolverFactory {
private static final ValuesOrderResolver EMPTY_RESOLVER = new MyEmptyResolver();
private static final ValuesOrderResolver IDENTITY_RESOLVER = new MyIdentityResolver();
private static class Holder {
private static final ResolverFactoryImpl INSTANCE = new ResolverFactoryImpl();
}
public static ResolverFactory getInstance() {
return Holder.INSTANCE;
}
@NotNull
@Override
public ValuesOrderResolver getResolver(@NotNull String methodName) {
switch (methodName) {
case "filter":
case "limit":
case "skip":
return new FilterResolver();
case "flatMap":
return new FlatMapResolver();
case "map":
case "mapToInt":
case "mapToLong":
case "mapToDouble":
case "boxed":
return new MapResolver();
case "sorted":
case "peek":
return IDENTITY_RESOLVER;
case "distinct":
return new DistinctResolver();
default:
return EMPTY_RESOLVER;
}
}
private static class MyEmptyResolver implements ValuesOrderResolver {
@NotNull
@Override
public Result resolve(@NotNull TraceInfo info) {
final Map<Integer, TraceElement> orderBefore = info.getValuesOrderBefore();
final Map<Integer, TraceElement> orderAfter = info.getValuesOrderAfter();
return Result.of(toEmptyMap(orderBefore), toEmptyMap(orderAfter));
}
@NotNull
private static Map<TraceElement, List<TraceElement>> toEmptyMap(@NotNull Map<Integer, TraceElement> order) {
return order.keySet().stream().sorted().collect(Collectors.toMap(order::get, x -> Collections.emptyList()));
}
}
private static class MyIdentityResolver implements ValuesOrderResolver {
@NotNull
@Override
public Result resolve(@NotNull TraceInfo info) {
final Map<Integer, TraceElement> before = info.getValuesOrderBefore();
final Map<Integer, TraceElement> after = info.getValuesOrderAfter();
assert before.size() == after.size();
return Result.of(buildIdentityMapping(before), buildIdentityMapping(after));
}
private static Map<TraceElement, List<TraceElement>> buildIdentityMapping(@NotNull Map<Integer, TraceElement> previousCalls) {
final LinkedHashMap<TraceElement, List<TraceElement>> result = new LinkedHashMap<>();
previousCalls.values().stream().distinct().forEach(x -> result.put(x, Collections.singletonList(x)));
return result;
}
}
}
| plugins/stream-debugger/src/main/java/com/intellij/debugger/streams/resolve/ResolverFactoryImpl.java | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.debugger.streams.resolve;
import com.intellij.debugger.streams.trace.smart.TraceElement;
import com.intellij.debugger.streams.trace.smart.resolve.TraceInfo;
import org.jetbrains.annotations.NotNull;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* @author Vitaliy.Bibaev
*/
public class ResolverFactoryImpl implements ResolverFactory {
private static final ValuesOrderResolver EMPTY_RESOLVER = new MyEmptyResolver();
private static final ValuesOrderResolver IDENTITY_RESOLVER = new MyIdentityResolver();
private static class Holder {
private static final ResolverFactoryImpl INSTANCE = new ResolverFactoryImpl();
}
public static ResolverFactory getInstance() {
return Holder.INSTANCE;
}
@NotNull
@Override
public ValuesOrderResolver getResolver(@NotNull String methodName) {
switch (methodName) {
case "filter":
case "limit":
return new FilterResolver();
case "flatMap":
return new FlatMapResolver();
case "map":
case "mapToInt":
case "mapToLong":
case "mapToDouble":
case "boxed":
return new MapResolver();
case "sorted":
case "peek":
return IDENTITY_RESOLVER;
case "distinct":
return new DistinctResolver();
default:
return EMPTY_RESOLVER;
}
}
private static class MyEmptyResolver implements ValuesOrderResolver {
@NotNull
@Override
public Result resolve(@NotNull TraceInfo info) {
final Map<Integer, TraceElement> orderBefore = info.getValuesOrderBefore();
final Map<Integer, TraceElement> orderAfter = info.getValuesOrderAfter();
return Result.of(toEmptyMap(orderBefore), toEmptyMap(orderAfter));
}
@NotNull
private static Map<TraceElement, List<TraceElement>> toEmptyMap(@NotNull Map<Integer, TraceElement> order) {
return order.keySet().stream().sorted().collect(Collectors.toMap(order::get, x -> Collections.emptyList()));
}
}
private static class MyIdentityResolver implements ValuesOrderResolver {
@NotNull
@Override
public Result resolve(@NotNull TraceInfo info) {
final Map<Integer, TraceElement> before = info.getValuesOrderBefore();
final Map<Integer, TraceElement> after = info.getValuesOrderAfter();
assert before.size() == after.size();
return Result.of(buildIdentityMapping(before), buildIdentityMapping(after));
}
private static Map<TraceElement, List<TraceElement>> buildIdentityMapping(@NotNull Map<Integer, TraceElement> previousCalls) {
final LinkedHashMap<TraceElement, List<TraceElement>> result = new LinkedHashMap<>();
previousCalls.values().stream().distinct().forEach(x -> result.put(x, Collections.singletonList(x)));
return result;
}
}
}
| Change resolver for skip intermediate operation
| plugins/stream-debugger/src/main/java/com/intellij/debugger/streams/resolve/ResolverFactoryImpl.java | Change resolver for skip intermediate operation | <ide><path>lugins/stream-debugger/src/main/java/com/intellij/debugger/streams/resolve/ResolverFactoryImpl.java
<ide> switch (methodName) {
<ide> case "filter":
<ide> case "limit":
<add> case "skip":
<ide> return new FilterResolver();
<ide> case "flatMap":
<ide> return new FlatMapResolver(); |
|
Java | epl-1.0 | 4ee19f8485624c865a2a3e51a01000b9d1735734 | 0 | junit-team/junit-lambda,sbrannen/junit-lambda | /*
* Copyright 2015-2018 the original author or authors.
*
* All rights reserved. This program and the accompanying materials are
* made available under the terms of the Eclipse Public License v2.0 which
* accompanies this distribution and is available at
*
* http://www.eclipse.org/legal/epl-v20.html
*/
package org.junit.jupiter.params.provider;
import static java.lang.String.format;
import static org.junit.jupiter.params.provider.Arguments.arguments;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.stream.Stream;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.params.support.AnnotationConsumer;
import org.junit.platform.commons.JUnitException;
import org.junit.platform.commons.util.CollectionUtils;
import org.junit.platform.commons.util.Preconditions;
import org.junit.platform.commons.util.ReflectionUtils;
import org.junit.platform.commons.util.StringUtils;
/**
* @since 5.0
*/
class MethodArgumentsProvider implements ArgumentsProvider, AnnotationConsumer<MethodSource> {
private String[] methodNames;
@Override
public void accept(MethodSource annotation) {
this.methodNames = annotation.value();
}
@Override
public Stream<Arguments> provideArguments(ExtensionContext context) {
Object testInstance = context.getTestInstance().orElse(null);
// @formatter:off
return Arrays.stream(this.methodNames)
.map(factoryMethodName -> getMethod(context, factoryMethodName))
.map(method -> ReflectionUtils.invokeMethod(method, testInstance))
.flatMap(CollectionUtils::toStream)
.map(MethodArgumentsProvider::toArguments);
// @formatter:on
}
private Method getMethod(ExtensionContext context, String factoryMethodName) {
if (StringUtils.isNotBlank(factoryMethodName)) {
if (factoryMethodName.contains("#")) {
return getMethodByFullyQualifiedName(factoryMethodName);
}
else {
return getMethod(context.getRequiredTestClass(), factoryMethodName);
}
}
return getMethod(context.getRequiredTestClass(), context.getRequiredTestMethod().getName());
}
private Method getMethodByFullyQualifiedName(String fullyQualifiedMethodName) {
String[] methodParts = ReflectionUtils.parseFullyQualifiedMethodName(fullyQualifiedMethodName);
String className = methodParts[0];
String methodName = methodParts[1];
String methodParameters = methodParts[2];
Preconditions.condition(StringUtils.isBlank(methodParameters),
() -> format("factory method [%s] must not declare formal parameters", fullyQualifiedMethodName));
return getMethod(loadRequiredClass(className), methodName);
}
private Class<?> loadRequiredClass(String className) {
return ReflectionUtils.tryToLoadClass(className).getOrThrow(
cause -> new JUnitException(format("Could not load class [%s]", className), cause));
}
private Method getMethod(Class<?> clazz, String methodName) {
return ReflectionUtils.findMethod(clazz, methodName).orElseThrow(() -> new JUnitException(
format("Could not find factory method [%s] in class [%s]", methodName, clazz.getName())));
}
private static Arguments toArguments(Object item) {
// Nothing to do except cast.
if (item instanceof Arguments) {
return (Arguments) item;
}
// Pass all multidimensional arrays "as is", in contrast to Object[].
// See https://github.com/junit-team/junit5/issues/1665
if (ReflectionUtils.isMultidimensionalArray(item)) {
return arguments(item);
}
// Special treatment for one-dimensional reference arrays.
// See https://github.com/junit-team/junit5/issues/1665
if (item instanceof Object[]) {
return arguments((Object[]) item);
}
// Pass everything else "as is".
return arguments(item);
}
}
| junit-jupiter-params/src/main/java/org/junit/jupiter/params/provider/MethodArgumentsProvider.java | /*
* Copyright 2015-2018 the original author or authors.
*
* All rights reserved. This program and the accompanying materials are
* made available under the terms of the Eclipse Public License v2.0 which
* accompanies this distribution and is available at
*
* http://www.eclipse.org/legal/epl-v20.html
*/
package org.junit.jupiter.params.provider;
import static java.lang.String.format;
import static org.junit.jupiter.params.provider.Arguments.arguments;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.stream.Stream;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.params.support.AnnotationConsumer;
import org.junit.platform.commons.JUnitException;
import org.junit.platform.commons.util.CollectionUtils;
import org.junit.platform.commons.util.Preconditions;
import org.junit.platform.commons.util.ReflectionUtils;
import org.junit.platform.commons.util.StringUtils;
/**
* @since 5.0
*/
class MethodArgumentsProvider implements ArgumentsProvider, AnnotationConsumer<MethodSource> {
private String[] methodNames;
@Override
public void accept(MethodSource annotation) {
this.methodNames = annotation.value();
}
@Override
public Stream<Arguments> provideArguments(ExtensionContext context) {
Object testInstance = context.getTestInstance().orElse(null);
// @formatter:off
return Arrays.stream(this.methodNames)
.map(argumentsMethodName -> getMethod(context, argumentsMethodName))
.map(method -> ReflectionUtils.invokeMethod(method, testInstance))
.flatMap(CollectionUtils::toStream)
.map(MethodArgumentsProvider::toArguments);
// @formatter:on
}
private Method getMethod(ExtensionContext context, String argumentsMethodName) {
if (StringUtils.isNotBlank(argumentsMethodName)) {
if (argumentsMethodName.contains("#")) {
return getMethodByFullyQualifiedName(argumentsMethodName);
}
else {
return getMethod(context.getRequiredTestClass(), argumentsMethodName);
}
}
return getMethod(context.getRequiredTestClass(), context.getRequiredTestMethod().getName());
}
private Method getMethodByFullyQualifiedName(String fullyQualifiedMethodName) {
String[] methodParts = ReflectionUtils.parseFullyQualifiedMethodName(fullyQualifiedMethodName);
String className = methodParts[0];
String methodName = methodParts[1];
String methodParameters = methodParts[2];
Preconditions.condition(StringUtils.isBlank(methodParameters),
() -> format("factory method [%s] must not declare formal parameters", fullyQualifiedMethodName));
return getMethod(loadRequiredClass(className), methodName);
}
private Class<?> loadRequiredClass(String className) {
return ReflectionUtils.tryToLoadClass(className).getOrThrow(
cause -> new JUnitException(format("Could not load class [%s]", className), cause));
}
private Method getMethod(Class<?> clazz, String methodName) {
return ReflectionUtils.findMethod(clazz, methodName).orElseThrow(() -> new JUnitException(
format("Could not find factory method [%s] in class [%s]", methodName, clazz.getName())));
}
private static Arguments toArguments(Object item) {
// Nothing to do except cast.
if (item instanceof Arguments) {
return (Arguments) item;
}
// Pass all multidimensional arrays "as is", in contrast to Object[].
// See https://github.com/junit-team/junit5/issues/1665
if (ReflectionUtils.isMultidimensionalArray(item)) {
return arguments(item);
}
// Special treatment for one-dimensional reference arrays.
// See https://github.com/junit-team/junit5/issues/1665
if (item instanceof Object[]) {
return arguments((Object[]) item);
}
// Pass everything else "as is".
return arguments(item);
}
}
| Polishing
| junit-jupiter-params/src/main/java/org/junit/jupiter/params/provider/MethodArgumentsProvider.java | Polishing | <ide><path>unit-jupiter-params/src/main/java/org/junit/jupiter/params/provider/MethodArgumentsProvider.java
<ide> Object testInstance = context.getTestInstance().orElse(null);
<ide> // @formatter:off
<ide> return Arrays.stream(this.methodNames)
<del> .map(argumentsMethodName -> getMethod(context, argumentsMethodName))
<add> .map(factoryMethodName -> getMethod(context, factoryMethodName))
<ide> .map(method -> ReflectionUtils.invokeMethod(method, testInstance))
<ide> .flatMap(CollectionUtils::toStream)
<ide> .map(MethodArgumentsProvider::toArguments);
<ide> // @formatter:on
<ide> }
<ide>
<del> private Method getMethod(ExtensionContext context, String argumentsMethodName) {
<del> if (StringUtils.isNotBlank(argumentsMethodName)) {
<del> if (argumentsMethodName.contains("#")) {
<del> return getMethodByFullyQualifiedName(argumentsMethodName);
<add> private Method getMethod(ExtensionContext context, String factoryMethodName) {
<add> if (StringUtils.isNotBlank(factoryMethodName)) {
<add> if (factoryMethodName.contains("#")) {
<add> return getMethodByFullyQualifiedName(factoryMethodName);
<ide> }
<ide> else {
<del> return getMethod(context.getRequiredTestClass(), argumentsMethodName);
<add> return getMethod(context.getRequiredTestClass(), factoryMethodName);
<ide> }
<ide> }
<ide> return getMethod(context.getRequiredTestClass(), context.getRequiredTestMethod().getName()); |
|
Java | mit | 1a2f6f3762d6bbb8ed6800686ad9ebefeae5f798 | 0 | bwkimmel/jmist | /**
*
*/
package org.jmist.framework.services;
import java.rmi.RMISecurityManager;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import javax.swing.JDialog;
import org.jmist.framework.Job;
import org.jmist.framework.reporting.ProgressTreePanel;
/**
* @author brad
*
*/
public final class WorkerClient {
/**
* @param args
*/
public static void main(String[] args) {
if (System.getSecurityManager() == null) {
System.setSecurityManager(new RMISecurityManager());
}
String host = args[0].trim().length() > 0 ? args[0] : "localhost";
JDialog dialog = new JDialog();
ProgressTreePanel monitor = new ProgressTreePanel();
Executor threadPool = Executors.newFixedThreadPool(2, new BackgroundThreadFactory());
Job workerJob = new ThreadServiceWorkerJob(host, 10000, 2, threadPool);
dialog.add(monitor);
dialog.setBounds(0, 0, 400, 300);
dialog.setVisible(true);
workerJob.go(monitor);
}
}
| src/org/jmist/framework/services/WorkerClient.java | /**
*
*/
package org.jmist.framework.services;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import javax.swing.JDialog;
import org.jmist.framework.Job;
import org.jmist.framework.reporting.ProgressTreePanel;
/**
* @author brad
*
*/
public final class WorkerClient {
/**
* @param args
*/
public static void main(String[] args) {
String host = args[0].trim().length() > 0 ? args[0] : "localhost";
JDialog dialog = new JDialog();
ProgressTreePanel monitor = new ProgressTreePanel();
Executor threadPool = Executors.newFixedThreadPool(2, new BackgroundThreadFactory());
Job workerJob = new ThreadServiceWorkerJob(host, 10000, 2, threadPool);
dialog.add(monitor);
dialog.setBounds(0, 0, 400, 300);
dialog.setVisible(true);
workerJob.go(monitor);
}
}
| Set the security manager.
| src/org/jmist/framework/services/WorkerClient.java | Set the security manager. | <ide><path>rc/org/jmist/framework/services/WorkerClient.java
<ide> */
<ide> package org.jmist.framework.services;
<ide>
<add>import java.rmi.RMISecurityManager;
<ide> import java.util.concurrent.Executor;
<ide> import java.util.concurrent.Executors;
<ide>
<ide> */
<ide> public static void main(String[] args) {
<ide>
<add> if (System.getSecurityManager() == null) {
<add> System.setSecurityManager(new RMISecurityManager());
<add> }
<add>
<ide> String host = args[0].trim().length() > 0 ? args[0] : "localhost";
<ide> JDialog dialog = new JDialog();
<ide> ProgressTreePanel monitor = new ProgressTreePanel(); |
|
Java | mit | fac49b68b8657eed19366f969bf435f73e63229f | 0 | pmidford/owlbuilder | package org.arachb.owlbuilder.lib;
import java.util.HashMap;
import java.util.Map;
import org.arachb.arachadmin.PublicationBean;
import org.arachb.owlbuilder.Owlbuilder;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLAnnotation;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLClassAssertionAxiom;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLIndividual;
import org.semanticweb.owlapi.model.OWLObject;
import org.semanticweb.owlapi.model.OWLOntologyManager;
public class Publication implements GeneratingEntity {
private final PublicationBean bean;
private String generatedLabel;
public Publication(PublicationBean b){
bean = b;
generatedLabel = generateLabel();
}
public boolean hasGeneratedLabel(){
return (generatedLabel != null);
}
public String getGeneratedLabel(){
return generatedLabel;
}
//generate a label; this should gradually get smarter
private String generateLabel(){
StringBuilder b = new StringBuilder(100);
b.append(bean.getAuthorList());
b.append(' ');
b.append(bean.getPublicationYear());
return b.toString();
}
final private static Map<String,OWLObject> defaultElementMap = new HashMap<String,OWLObject>();
@Override
public OWLObject generateOWL(Owlbuilder builder) throws Exception {
OWLObject result = generateOWL(builder, defaultElementMap);
defaultElementMap.clear();
return result;
}
@Override
public OWLObject generateOWL(Owlbuilder builder, Map<String, OWLObject> elements)
throws Exception {
final OWLOntologyManager manager = builder.getOntologyManager();
final OWLDataFactory factory = builder.getDataFactory();
final OWLClass pubAboutInvestigationClass =
factory.getOWLClass(Vocabulary.pubAboutInvestigation);
IRI publication_id = IRI.create(bean.checkIRIString(builder.getIRIManager()));
assert(publication_id != null);
OWLIndividual pub_ind = factory.getOWLNamedIndividual(publication_id);
OWLClassAssertionAxiom classAssertion =
factory.getOWLClassAssertionAxiom(pubAboutInvestigationClass, pub_ind);
manager.addAxiom(builder.getTarget(), classAssertion);
if (hasGeneratedLabel()){
OWLAnnotation labelAnno = factory.getOWLAnnotation(factory.getRDFSLabel(),
factory.getOWLLiteral(getGeneratedLabel()));
OWLAxiom ax = factory.getOWLAnnotationAssertionAxiom(publication_id, labelAnno);
// Add the axiom to the ontology
manager.addAxiom(builder.getTarget(),ax);
}
return pub_ind;
}
}
| src/main/java/org/arachb/owlbuilder/lib/Publication.java | package org.arachb.owlbuilder.lib;
import java.sql.SQLException;
import org.arachb.arachadmin.PublicationBean;
import org.arachb.owlbuilder.Owlbuilder;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLAnnotation;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLClassAssertionAxiom;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLIndividual;
import org.semanticweb.owlapi.model.OWLObject;
import org.semanticweb.owlapi.model.OWLOntologyManager;
public class Publication implements GeneratingEntity {
private final PublicationBean bean;
private String generatedLabel;
public Publication(PublicationBean b){
bean = b;
generatedLabel = generateLabel();
}
public boolean hasGeneratedLabel(){
return (generatedLabel != null);
}
public String getGeneratedLabel(){
return generatedLabel;
}
//generate a label; this should gradually get smarter
private String generateLabel(){
StringBuilder b = new StringBuilder(100);
b.append(bean.getAuthorList());
b.append(' ');
b.append(bean.getPublicationYear());
return b.toString();
}
@Override
public OWLObject generateOWL(Owlbuilder builder) throws SQLException {
final OWLOntologyManager manager = builder.getOntologyManager();
final OWLDataFactory factory = builder.getDataFactory();
final OWLClass pubAboutInvestigationClass =
factory.getOWLClass(Vocabulary.pubAboutInvestigation);
IRI publication_id = IRI.create(bean.checkIRIString(builder.getIRIManager()));
assert(publication_id != null);
OWLIndividual pub_ind = factory.getOWLNamedIndividual(publication_id);
OWLClassAssertionAxiom classAssertion =
factory.getOWLClassAssertionAxiom(pubAboutInvestigationClass, pub_ind);
manager.addAxiom(builder.getTarget(), classAssertion);
if (hasGeneratedLabel()){
OWLAnnotation labelAnno = factory.getOWLAnnotation(factory.getRDFSLabel(),
factory.getOWLLiteral(getGeneratedLabel()));
OWLAxiom ax = factory.getOWLAnnotationAssertionAxiom(publication_id, labelAnno);
// Add the axiom to the ontology
manager.addAxiom(builder.getTarget(),ax);
}
return pub_ind;
}
}
| update generation methods
| src/main/java/org/arachb/owlbuilder/lib/Publication.java | update generation methods | <ide><path>rc/main/java/org/arachb/owlbuilder/lib/Publication.java
<ide> package org.arachb.owlbuilder.lib;
<ide>
<del>import java.sql.SQLException;
<add>import java.util.HashMap;
<add>import java.util.Map;
<ide>
<ide> import org.arachb.arachadmin.PublicationBean;
<ide> import org.arachb.owlbuilder.Owlbuilder;
<ide> private final PublicationBean bean;
<ide>
<ide> private String generatedLabel;
<del>
<del>
<ide>
<ide> public Publication(PublicationBean b){
<ide> bean = b;
<ide> return b.toString();
<ide> }
<ide>
<add> final private static Map<String,OWLObject> defaultElementMap = new HashMap<String,OWLObject>();
<add>
<add> @Override
<add> public OWLObject generateOWL(Owlbuilder builder) throws Exception {
<add> OWLObject result = generateOWL(builder, defaultElementMap);
<add> defaultElementMap.clear();
<add> return result;
<add> }
<ide>
<ide>
<ide> @Override
<del> public OWLObject generateOWL(Owlbuilder builder) throws SQLException {
<add> public OWLObject generateOWL(Owlbuilder builder, Map<String, OWLObject> elements)
<add> throws Exception {
<ide> final OWLOntologyManager manager = builder.getOntologyManager();
<ide> final OWLDataFactory factory = builder.getDataFactory();
<ide> final OWLClass pubAboutInvestigationClass =
<ide> }
<ide> return pub_ind;
<ide> }
<del>
<del>
<ide>
<del>
<del>
<del>
<del>
<del>
<del>
<del>
<ide> } |
|
JavaScript | mit | 2406c7b5093062649c849cc3da6ecc25f123a3bc | 0 | LoudBit/the-thing-is | // The Thing Is
// Configuration driven validation
// with the goal of near-english syntax
// Usage:
//
// var whatYouExpect = 'number' -- string (least useful)
// var whatYouExpect = ['present', 'number', {greaterThan:0}] -- array (tightest to write)
// var whatYouExpect = {present:true, number:true, greaterThan:0} -- object (verbose, unreliable order)
//
// if (the(thing).is(whatYouExpect)) {
// fuckYeah()
// }
//
// if (the(thing).isnt(whatYouExpect)) {
// throwSomething()
// }
// Dependencies
var is = require('is-it')
var method
function the(thing) {
the.past.push({ thing:thing })
the.last = the.past[the.past.length-1 || 0]
return comparisons
}
the.past = []
the.last = null
// is, isnt, is.not
var comparisons = {
is: function(whatYouExpect) {
// 'present' -- single boolean check
// ['present', 'number'] -- array of boolean
// {greaterThan:0} -- configuration object with single condition
// {present:true, integer:true, greaterThan:0} -- unreliable due to unreliable hash key order
// [{present:true}, {integer:true}, {greaterThan:0}] -- reliable, but verbose
// ['present', 'integer', {greaterThanOrEqualTo:0}] -- preferred
},
isnt: function(whatYouExpect) {
}
}
function isProxy(method) { // param shadows the loop variable so each proxy keeps its own method
return function() {
var args = Array.prototype.slice.call(arguments)
args.unshift(the.last.subject)
if ( !the.last.error && is.not[method].apply(this, args) ) {
the.error(method)
}
    return comparisons.is // mirror isntProxy so chained checks keep working (fixes the missing return)
  }
}
function isntProxy(method) {
return function() {
var args = Array.prototype.slice.call(arguments)
args.unshift(the.last.subject)
if (!the.last.error && is[method].apply(this, args)) {
the.error(method)
}
    return comparisons.isnt
}
}
for (method in is) {
if (is.hasOwnProperty(method) && typeof is[method] == 'function') {
// create the(thing).is.whatever()
comparisons.is[method] = (isProxy)(method)
comparisons.isnt[method] = (isntProxy)(method)
}
}
module.exports = the
| the-thing-is.js | // The Thing Is
// Configuration driven validation
// with the goal of a near-readable-english syntax
// Usage:
//
// var whatYouExpect = 'number' // string (least useful)
// var whatYouExpect = ['present', 'number', {greaterThan:0}] // array (tightest to write)
// var whatYouExpect = {present:true, number:true, greaterThan:0} // object (verbose)
//
// call a callback when done
// the(thing).isnt(whatYouExpect).then(function(){ throw Error('NOOOOOOOOOO!') })
// the(thing).isnt(whatYouExpect).then(throwSomething)
//
// optionally chain callback logic
// the(thing).isnt(whatYouExpect).do(throwSomething).else(drinkUp)
// the(thing).is(whatYouExpect).do(someThing).else(doSomethingElse)
//
// tell the function what to return to avoid the primitive v. object challenge
// return, do, then could be alias' of each other
// they'd check typeof argument and run the function, or return the value, or return true/false based on the whether or not the check failed
// ***********************************
// if (the(thing).isnt(whatYouExpect)) // automatically returns false because you passed in an array or object
// ***********************************
// if (the(thing).isnt(whatYouExpect).return(false))
// if (the(thing).isnt(whatYouExpect).do(false))
// if (the(thing).isnt(whatYouExpect).then(false))
// if (the(thing).isnt(whatYouExpect).then(callback)) // doesn't make sense in an if check
// if (the(thing).isnt(whatYouExpect).then())
// if (the(thing).is(whatYouExpect).then() )
// if (the(thing).is.present().number().gt(5).then())
// the(thing).isnt(whatYouExpect).then(callback)
// Don't do this
// it will always be true because it returns an object
//
// if (the(thing).is.present().number())
// potential alias
// ifThe(thing).is.present().number().greaterThan(5).do(callback)
// if (the(thing).is(['number', {greaterThan:0}])) {
// // celebrate
// }
// if (the(thing).isnt(['number', {greaterThan:0}])) {
// throw the.last.error
// }
// Dependencies
var is = require('is-it')
var method
function the(thing) {
the.past.push({ thing:thing })
the.last = the.past[the.past.length-1 || 0]
return comparisons
}
the.past = []
the.last = null
// is, isnt, is.not
var comparisons = {
is: function(whatYouExpect) {
// 'present' -- single boolean check
// ['present', 'number'] -- array of boolean
// {greaterThan:0} -- configuration object with single condition
// {present:true, integer:true, greaterThan:0} -- unreliable due to unreliable hash key order
// [{present:true}, {integer:true}, {greaterThan:0}] -- reliable, but verbose
// ['present', 'integer', {greaterThanOrEqualTo:0}] -- preferred
},
isnt: function(whatYouExpect) {
}
}
function isProxy(method) { // param shadows the loop variable so each proxy keeps its own method
return function() {
var args = Array.prototype.slice.call(arguments)
args.unshift(the.last.subject)
if ( !the.last.error && is.not[method].apply(this, args) ) {
the.error(method)
}
    return comparisons.is // mirror isntProxy so chained checks keep working (fixes the missing return)
  }
}
function isntProxy(method) {
return function() {
var args = Array.prototype.slice.call(arguments)
args.unshift(the.last.subject)
if (!the.last.error && is[method].apply(this, args)) {
the.error(method)
}
    return comparisons.isnt
}
}
for (method in is) {
if (is.hasOwnProperty(method) && typeof is[method] == 'function') {
// create the(thing).is.whatever()
comparisons.is[method] = (isProxy)(method)
comparisons.isnt[method] = (isntProxy)(method)
}
}
module.exports = the
| cleaned up notes
| the-thing-is.js | cleaned up notes | <ide><path>he-thing-is.js
<ide> // The Thing Is
<ide> // Configuration driven validation
<del>// with the goal of a near-readable-english syntax
<add>// with the goal of near-english syntax
<ide>
<ide> // Usage:
<ide> //
<del>// var whatYouExpect = 'number' // string (least useful)
<del>// var whatYouExpect = ['present', 'number', {greaterThan:0}] // array (tightest to write)
<del>// var whatYouExpect = {present:true, number:true, greaterThan:0} // object (verbose)
<add>// var whatYouExpect = 'number' -- string (least useful)
<add>// var whatYouExpect = ['present', 'number', {greaterThan:0}] -- array (tightest to write)
<add>// var whatYouExpect = {present:true, number:true, greaterThan:0} -- object (verbose, unreliable order)
<ide> //
<del>// call a callback when done
<del>// the(thing).isnt(whatYouExpect).then(function(){ throw Error('NOOOOOOOOOO!') })
<del>// the(thing).isnt(whatYouExpect).then(throwSomething)
<add>// if (the(thing).is(whatYouExpect)) {
<add>// fuckYeah()
<add>// }
<ide> //
<del>// optionally chain callback logic
<del>// the(thing).isnt(whatYouExpect).do(throwSomething).else(drinkUp)
<del>// the(thing).is(whatYouExpect).do(someThing).else(doSomethingElse)
<del>//
<del>// tell the function what to return to avoid the primitive v. object challenge
<del>// return, do, then could be alias' of each other
<del>// they'd check typeof argument and run the function, or return the value, or return true/false based on the whether or not the check failed
<del>// ***********************************
<del>// if (the(thing).isnt(whatYouExpect)) // automatically returns false because you passed in an array or object
<del>// ***********************************
<del>// if (the(thing).isnt(whatYouExpect).return(false))
<del>// if (the(thing).isnt(whatYouExpect).do(false))
<del>// if (the(thing).isnt(whatYouExpect).then(false))
<del>// if (the(thing).isnt(whatYouExpect).then(callback)) // doesn't make sense in an if check
<del>// if (the(thing).isnt(whatYouExpect).then())
<del>// if (the(thing).is(whatYouExpect).then() )
<del>// if (the(thing).is.present().number().gt(5).then())
<del>// the(thing).isnt(whatYouExpect).then(callback)
<del>
<del>// Don't do this
<del>// it will always be true because it returns an object
<del>//
<del>// if (the(thing).is.present().number())
<del>
<del>// potential alias
<del>// ifThe(thing).is.present().number().greaterThan(5).do(callback)
<del>
<del>
<del>// if (the(thing).is(['number', {greaterThan:0}])) {
<del>// // celebrate
<del>// }
<del>// if (the(thing).isnt(['number', {greaterThan:0}])) {
<del>// throw the.last.error
<add>// if (the(thing).isnt(whatYouExpect)) {
<add>// throwSomething()
<ide> // }
<ide>
<ide> // Dependencies |
|
Java | mit | 47b93e10d7f2938b7d403fc7d42212a10d488b35 | 0 | Steven-N-Hart/vcf-miner,Steven-N-Hart/vcf-miner,Steven-N-Hart/vcf-miner,Steven-N-Hart/vcf-miner,Steven-N-Hart/vcf-miner | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package edu.mayo.ve.VCFParser;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.mongodb.*;
import com.mongodb.util.JSON;
import com.tinkerpop.pipes.Pipe;
import com.tinkerpop.pipes.transform.IdentityPipe;
import com.tinkerpop.pipes.util.Pipeline;
import edu.mayo.TypeAhead.TypeAheadCollection;
import edu.mayo.TypeAhead.TypeAheadInterface;
import edu.mayo.concurrency.exceptions.ProcessTerminatedException;
import edu.mayo.concurrency.workerQueue.Task;
import edu.mayo.index.Index;
import edu.mayo.parsers.ParserInterface;
import edu.mayo.pipes.MergePipe;
import edu.mayo.pipes.ReplaceAllPipe;
import edu.mayo.pipes.UNIX.CatPipe;
import edu.mayo.pipes.bioinformatics.SampleDefinition;
import edu.mayo.pipes.bioinformatics.VCF2VariantPipe;
import edu.mayo.pipes.history.HistoryInPipe;
import edu.mayo.pipes.iterators.Compressor;
import edu.mayo.senders.FileSender;
import edu.mayo.senders.Sender;
import edu.mayo.util.MongoConnection;
import edu.mayo.util.SystemProperties;
import edu.mayo.util.Tokens;
import edu.mayo.ve.resources.MetaData;
import edu.mayo.ve.resources.SampleMeta;
import java.io.File;
import java.io.IOException;
import java.io.LineNumberReader;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.*;
//import edu.mayo.cli.CommandPlugin; TO DO! get this to work :(
/**
*
* @author m102417
*/
public class VCFParser implements ParserInterface {
private int cacheSize = 50000;
private Mongo m = MongoConnection.getMongo();
/** testingCollection contains all of the objects placed into the workspace from parsing the VCF */
HashMap<Integer,String> testingCollection = new HashMap<Integer,String>();
JsonObject json = null;
/** @param context - the execution context (so we can kill the process if needed) -- can be null */
private Task context = null;
/** @param typeAhead - the implementation for where value sets will be stored for providing type-ahead functionality. */
private TypeAheadInterface typeAhead = new TypeAheadCollection();
    /** @param testing -- populate the in-memory testingCollection instead of Mongo. */
private boolean testing = false;
/** @param reporting - if verbose output is desired (much slower and not for production use, use when debugging) */
private boolean reporting = false;
private Double initialLinePerformance = 0.0;
private Double averageLinePerformance = 0.0;
public static void usage(){
System.out.println("This program will parse a VCF file, obtain the 'schema' for that VCF and populate a MongoDB database with the variants in the VCF.");
System.out.println("");
System.out.println("Make sure to check your sys.properties file fo the MongoDB IP/Port combination, otherwised this script may fail");
System.out.println("usage: VCFParser <input.vcf> <workspace_id>");
}
public static void main(String[] args) throws IOException, ProcessTerminatedException {
SystemProperties sysprops = new SystemProperties();
String mongoPort = sysprops.get("mongo_port");
VCFParser parser = new VCFParser();
if( args.length != 2 ) {
usage();
System.exit(1);
}
String infile = args[0];
String workspace = args[1];
// String infile = "/data/VCFExamples/BATCH4.vcf";
// String outfile = "/data/VCFExamples/BATCH4.json";
// String workspace = "w7ee29742ff80d61953d5e6f84e1686957fbe36f7";
System.out.println("#Input File: " + infile);
System.out.println("#Workspace: " + workspace);
System.out.println("#mongo_server: " + sysprops.get("mongo_server") );
System.out.println("#mongo port: " + new Integer(sysprops.get("mongo_port")));
parser.setReporting(true);
int datalines = parser.parse(null, infile, workspace, 50000, false, true);
parser.checkAndUpdateLoadStatus(workspace, datalines, true);
parser.m.close();
//note the following will only work if you have a document in mongo like:
//{ "_id" : { "$oid" : "51afa2710364d3ebd97b533a"} , "owner" : "steve" , "alias" : "foo" , "key" : "w7ee29742ff80d61953d5e6f84e1686957fbe36f7"}
//parser.updateMetadata(workspace, "{ \"hosting\" : \"hostA\" , \"clients\" : \"888\" , \"type\" : \"vps\"}");
}
/**
*
* @return The path to an error file that will show all errors with the load
*/
public String getErrorFile(String workspace){
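        // e.g. with TEMPDIR=/tmp in sys.properties this yields /tmp/<workspace>.errors (example path, for illustration only)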
try {
SystemProperties sysprops = new SystemProperties();
String tmp = sysprops.get("TEMPDIR");
return tmp + File.separator + workspace + ".errors";
}catch (Exception e){
throw new RuntimeException(e);
}
}
/** legacy interface, keep it in place for testing */
public int parse(Task context, String infile, String workspace, int typeAheadCacheSize) throws ProcessTerminatedException {
return parse(context, infile, workspace, typeAheadCacheSize, false, false);
}
/** legacy interface, keep it in place for testing */
public int parse(Task context, String infile, String workspace, int typeAheadCacheSize, boolean testing) throws ProcessTerminatedException{
return parse(context, infile, workspace, typeAheadCacheSize, testing, false);
}
/** legacy interface, keep it in place for testing */
public int parse(Task context, String infile, String workspace, int typeAheadCacheSize, boolean testing, boolean reporting) throws ProcessTerminatedException {
typeAhead = new TypeAheadCollection();
this.reporting = reporting;
this.testing = testing;
return parse(infile, workspace);
}
/**
     * This method makes it easier to test the parsing logic in the VCF file by letting test
     * methods obtain the parsing pipeline directly.
* @return
*/
public Pipe getPipeline(VCF2VariantPipe vcf, String infile){
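        // Rough sketch of what each stage appears to do (pipe names come from the bior pipes library;
        // the intent notes below are inferred, not documented):
        //   CatPipe                     - streams the raw VCF file line by line
        //   ReplaceAllPipe on { and }   - strips literal braces, presumably so VCF text can't corrupt the JSON added later
        //   HistoryInPipe               - splits each line into tab-delimited columns (a "history")
        //   vcf (VCF2VariantPipe)       - appends a JSON rendering of the variant as an extra column
        //   MergePipe("\t")             - joins the columns back into one tab-delimited string
        //   ReplaceAllPipe ^.*\t\{ -> { - drops everything before the JSON column
        //   "_id" -> "_ident"           - presumably avoids colliding with Mongo's reserved _id field
        //   Infinity -> 2147483648      - presumably because Infinity is not a legal JSON/Mongo literal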
Pipe p = new Pipeline(new CatPipe(),
new ReplaceAllPipe("\\{",""),
new ReplaceAllPipe("\\}",""),
new HistoryInPipe(),
vcf,
new MergePipe("\t"),
new ReplaceAllPipe("^.*\t\\{", "{"),
new ReplaceAllPipe("\"_id\":","\"_ident\":"),
new ReplaceAllPipe("Infinity","2147483648"),
//new PrintPipe(),
new IdentityPipe()
);
p.setStarts(Arrays.asList(infile));
return p;
}
/**
* This is the simple direct interface that just works when we need a simple parser.
     * Parses the infile, which is a raw VCF, and puts it into the mongo workspace.
     * @param infile - the raw complete (canonical) path to the file we want to parse, as a string.
* @param workspace - the key for the workspace where we will put the data
* @return lines processed.
*/
public int parse(String infile, String workspace) throws ProcessTerminatedException{
Sender sender = new FileSender(getErrorFile(workspace));
if(reporting){ System.out.println("Getting the vcf-miner database from mongo"); }
DB db = MongoConnection.getDB();
if(reporting){ System.out.println("Getting the workspace collection from mongo"); }
DBCollection col = db.getCollection(workspace);
if(reporting){
System.out.println("Setting up Pipeline,\n input file: " + infile);
System.out.println("Workspace: " + workspace);
System.out.println("TypeAhead: " + typeAhead);
System.out.println("Testing: " + testing);
System.out.println("Reporting: " + reporting);
}
        //make sure we have type-ahead indexed before we go ahead and do the load:
typeAhead.index(workspace, reporting);
VCF2VariantPipe vcf = new VCF2VariantPipe(sender, true, false);
Pipe p = getPipeline(vcf, infile);
int i;
long starttime = System.currentTimeMillis();
DBObject jsonmeta = null;
boolean storedVariantCount = false;
if(reporting) System.out.println("Processing Data....");
try {
for(i=0; p.hasNext(); i++){
if(context!=null){ if(context.isTerminated()) throw new ProcessTerminatedException(); }
if (!storedVariantCount) {
try {
storeVariantCount(new File(infile), vcf.getHeaderLineCount(), workspace);
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
storedVariantCount = true;
}
// if the line # is equally divisble by 256 (power of 2)
if (fastModulus(i, 256) == 0) {
storeCurrentVariantCount(i, workspace);
}
if(reporting) System.out.println(col.count());
String s = (String) p.next();
//System.out.println(s);
BasicDBObject bo = (BasicDBObject) JSON.parse(s);
//for type-ahead, we need access to the metadata inside the loop, try to force that here
if(jsonmeta == null){
//System.out.println(vcf.getJSONMetadata().toString());
jsonmeta = (DBObject) JSON.parse(vcf.getJSONMetadata().toString());
jsonmeta = removeDots(jsonmeta, reporting);
}
if(reporting){
System.out.println("row before removing dots:"); System.out.println(s); }
if(testing){
testingCollection.put(new Integer(i), s);
}else {//production
//System.out.println(bo.toString());
col.save(removeDots(bo, reporting));
addToTypeAhead(bo, workspace,jsonmeta);
}
if(reporting){
System.out.println("i:" + i + "\ts:" + s.length());
}
long curtime = System.currentTimeMillis();
averageLinePerformance = 1.0*(curtime-starttime)/(i+1);
if(i<50){
                    //consider this burn-in: these are the initial reading(s)...
initialLinePerformance = averageLinePerformance;
}
}
// final time, update current count
storeCurrentVariantCount(i, workspace);
} finally {
// close the FileSender so that all messages are flushed to disk
sender.close();
}
try {
addPoundSamples(vcf.getSampleDefinitions(), workspace);
}catch (IOException e){
            //this exception happens when the configuration file (sys.properties) is not set up correctly.
throw new ProcessTerminatedException();
}
json = vcf.getJSONMetadata();
//do some small fixes to the metadata before continuing...
        //if(reporting){System.out.println("Updating metadata with type-ahead information");}
        //DEPRECATED!!!
//json = updateMetadataWTypeAhead(json, typeAhead.getOverunFields());
if(reporting){System.out.println("Changing the structure of the FORMAT metadata");}
metadata = this.fixFormat((DBObject)JSON.parse(json.toString()));
if(!testing){
if(reporting){System.out.println("Updating metadata in database...");}
updateMetadata(workspace, metadata.toString(), reporting);
if(reporting){System.out.println("indexing...");}
index(workspace, vcf, reporting);
if(reporting){System.out.println("saving type-ahead results to the database");}
typeAhead.index(workspace, reporting);
}
if(reporting){ System.out.println("done!");}
return i; //the number of records processed
}
/**
* Stores the TOTAL variant count in the 'meta' collection
* @param vcf
* @param headerLineCount
* @param workspaceID
* @throws IOException
*/
private void storeVariantCount(File vcf, int headerLineCount, String workspaceID) throws IOException {
long timestamp = System.currentTimeMillis();
int variantCount = getLineCount(vcf) - headerLineCount;
long delta = System.currentTimeMillis() - timestamp;
System.out.println("Took " + delta + "ms to get line count for file " + vcf.getAbsolutePath());
// store to mongo meta collection
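        // shell equivalent (assuming Tokens.KEY resolves to "key" and Tokens.METADATA_COLLECTION to "meta"):
        //   db.meta.update({key: workspaceID}, {$set: {variant_count_total: variantCount}})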
BasicDBObject query = new BasicDBObject().append(Tokens.KEY, workspaceID);
BasicDBObject update = new BasicDBObject();
update.append("$set", new BasicDBObject().append("variant_count_total", variantCount));
MongoConnection.getDB().getCollection(Tokens.METADATA_COLLECTION).update(query, update);
}
/**
* Stores the CURRENT variant count in the 'meta' collection.
* @param currentLineNum
* @param workspaceID
*/
private void storeCurrentVariantCount(int currentLineNum, String workspaceID){
// store to mongo meta collection
BasicDBObject query = new BasicDBObject().append(Tokens.KEY, workspaceID);
BasicDBObject update = new BasicDBObject();
update.append("$set", new BasicDBObject().append("variant_count_current", currentLineNum));
MongoConnection.getDB().getCollection(Tokens.METADATA_COLLECTION).update(query, update);
}
/**
     * Faster implementation of the Java language modulus "%" operator, which internally uses
* slow division calculations. NOTE: this only works if the divisor is a power of 2 (e.g. 2, 4, 8, 16, etc...)
*
* @param dividend
* @param divisor
* @return
*/
private int fastModulus(int dividend, int divisor) {
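        // e.g. 1000 % 256 == 232 and 1000 & (256 - 1) == 232, since (divisor - 1) is a mask of the low-order bits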
return dividend & (divisor - 1);
}
/**
* Gets the total line count for the given file.
* @param f
* The file to be inspected.
* @return
* The line count for the given file.
* @throws IOException
*/
private int getLineCount(File f) throws IOException {
// use compressor to figure out how to handle .zip, .gz, .bz2, etc...
File fakeOutFile = File.createTempFile("fake", "fake");
Compressor compressor = new Compressor(f, fakeOutFile);
LineNumberReader reader = null;
try {
reader = new LineNumberReader(compressor.getReader());
reader.skip(Long.MAX_VALUE);
return reader.getLineNumber();
} finally {
            if (reader != null) reader.close(); // guard against an NPE here if the reader failed to open
fakeOutFile.delete();
}
}
/**
* Checks whether the given INFO field has a metadata type of String or Character.
*
* @param infoField
* The INFO field to be checked.
* @param metadata
* VCF metadata.
*/
private boolean isStringOrCharacter(String infoField, DBObject metadata) {
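        // walks metadata shaped like {"HEADER":{"INFO":{"<field>":{"type":"String", ...}}}}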
Map headerMap = (Map) metadata.get("HEADER");
Map infoMap = (Map) headerMap.get("INFO");
if (infoMap.containsKey(infoField)) {
Map fieldMetadata = (Map) infoMap.get(infoField);
String type = (String) fieldMetadata.get("type");
if(type.equals("String") || type.equals("Character")) {
return true;
} else {
return false;
}
} else {
// no matching ##INFO metadata for this field
// it's possible the ##INFO is missing completely or not well-formed
return false;
}
}
/**
*
* @param vcfLine
* @param workspace
* @param metadata - what the header says about the field (e.g. is it a number, if so, then don't do a type-ahead!)
*/
private void addToTypeAhead(DBObject vcfLine, String workspace, DBObject metadata){
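        // e.g. a row carrying {"INFO":{"SNPEFF_GENE_NAME":"BRCA2"}} (illustrative value) would record
        // "BRCA2" under the type-ahead key "INFO.SNPEFF_GENE_NAME"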
DBObject info = (DBObject) vcfLine.get("INFO"); //only care about info field for type-ahead
if(info != null){
for(String key : info.keySet()) {
String ikey = "INFO." + key;
if(isStringOrCharacter(key,metadata)) { //check to see if its a String or Character
Object o = info.get(key);
if (o instanceof BasicDBList) {
//it is a list of String or Character
addArrToTypeAhead((BasicDBList) o, workspace, ikey);
} else {
// it is a single String or Character value
typeAhead.put(workspace, ikey, String.valueOf(o));
}
}
}
}
}
/**
     * ##SAMPLE lines need to be added to their own collection so that later code can query them.
*/
public void addPoundSamples(Iterator<SampleDefinition> iter, String workspace) throws IOException {
SampleMeta sm = new SampleMeta();
SystemProperties sysprop = new SystemProperties();
String poundsamplecol = sysprop.get(SampleMeta.sample_meta_collection);
DB db = MongoConnection.getDB();
DBCollection col = db.getCollection(poundsamplecol);
Gson gson = new Gson();
        //First, get a cursor to the ##SAMPLE objects.
while(iter.hasNext()){
SampleDefinition sdef = (SampleDefinition) iter.next();
String json = gson.toJson(sdef);
BasicDBObject bo = (BasicDBObject) JSON.parse(json);
bo.append(SampleMeta.sample_meta_key, workspace);
System.out.println(json);
col.save(bo);
}
sm.indexSampleDocuments();
}
/**
*
* @param values - a list of strings, doubles or ints.
     * @param workspace - string, the workspace the values are indexed under
*/
private void addArrToTypeAhead(BasicDBList values, String workspace, String ikey){
//System.out.println("Adding Array: " + ikey);
for(Object o : values){
if (o instanceof String) {
typeAhead.put(workspace, ikey, o.toString());
//System.out.println(ikey + ((String) o));
} else if (o instanceof Integer) {
typeAhead.put(workspace, ikey, String.valueOf((Integer) o));
} else if (o instanceof Double) {
typeAhead.put(workspace, ikey, String.valueOf((Double) o));
}
}
}
/**
* Adds the {@link SampleDefinition} values of type string to the typeahead backend.
* @param def
* The {@link SampleDefinition} to be used.
* @param workspace
* The workspace key.
*/
public void addToTypeAhead(SampleDefinition def, String workspace) {
for (String stringMetaKey: def.getStringKeys()) {
final String ikey = "META." + stringMetaKey;
for (String value: def.getStringValues(stringMetaKey)) {
typeAhead.put(workspace, ikey, value);
}
}
}
private DBObject metadata = null;
/**
* for testing...
*/
public DBObject getMetadata(){
return metadata;
}
public int parse(Task task, String inputVCFFile, String workspace, TypeAheadInterface typeAhead, boolean testing) throws ProcessTerminatedException {
if(task != null)
context = task;
if(typeAhead != null){
this.typeAhead = typeAhead;
}
this.testing = testing;
return parse(inputVCFFile, workspace);
}
/**
     * This needs to be called after a parse to ensure that the load gets correctly registered by the UI.
     * It checks to see if the load worked or if it failed.
* @param workspace - the key for the workspace
* @param datalines - the number of lines of data in the load file
* @param force - force the status as successful
* @return true if it worked, false if it failed.
*/
public boolean checkAndUpdateLoadStatus(String workspace, int datalines, boolean force){
MetaData metaData = new MetaData();
if(force){
if(reporting) System.out.println("Force flag set, the workspace will be flagged as ready!");
metaData.flagAsReady(workspace);
}
if(reporting) System.out.println("Checking the status of the load");
//the first way the load could have failed is if the number of records in the workspace != the number of data lines
BasicDBObject query = new BasicDBObject(); //empty for all
DB db = MongoConnection.getDB();
DBCollection col = db.getCollection(workspace);
long linesLoaded = col.count(query);
if(linesLoaded != datalines){
metaData.flagAsFailed(workspace, "The load failed, the number of records in the workspace (" + linesLoaded + ") does not equal the number of data lines in the original file (" + datalines +")" );
return false;
}
//are there other ways we could check / need to check that a load failed?
//everything looks ok,
//now flag the workspace as ready so the UI knows
if(reporting) System.out.println("Flagging the workspace as ready");
//requested change by patrick
//metaData.flagAsReady(workspace);
return true;
}
private class TrackNode {
public TrackNode(DBObject node, boolean discovered){
this.node = node;
this.discovered = discovered;
}
public DBObject node; // the position in the JSON structure where the dfs is pointing
public DBObject shaddow; // shaddow is the new node in the copy structure
public boolean discovered = false;
}
/**
     * removeDots uses Depth First Search to traverse the JSON object hierarchy and replace any keys with a dot (.) in them
* with keys that have an underscore (_). This way mongo can load the key.
* @param bo
* @param reporting
* @return
*/
public DBObject removeDots(DBObject bo, boolean reporting){
if(bo == null) return bo;
if(bo.keySet().size() < 1) return bo;
//Generic non-recursive DFS algorithm is:
// 1 procedure DFS-iterative(G,v):
// 2 let S be a stack
// 3 S.push(v)
// 4 while S is not empty
// 5 v ← S.pop()
// 6 if v is not labeled as discovered:
// 7 label v as discovered
// 8 for all edges from v to w in G.adjacentEdges(v) do
// 9 S.push(w)
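        // e.g. {"INFO":{"GMAF.rel":0.1}} (hypothetical key) comes out as {"INFO":{"GMAF_rel":0.1}}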
Stack<TrackNode> s = new Stack<TrackNode>();
TrackNode v = new TrackNode(bo, false);
s.push(v);
while( s.size() > 0){
v = s.pop();
if(v.discovered == false){
v.discovered = true;
ArrayList<String> keyset = new ArrayList<String>();
for(String key : v.node.keySet()){
//System.out.println(key);
keyset.add(key);
Object o = v.node.get(key);
if(o instanceof DBObject){
DBObject d = (DBObject) o;
s.push(new TrackNode(d,false));
}//else it is data
}
for(String key : keyset){
if(key.contains(".")){
v.node = fixDBObjectDotKey(key, v.node);
}
}
}
}
if(reporting){
System.out.println("removedDotsResult");
System.out.println(bo.toString());
System.out.println("end");
}
return bo;
}
public String getNth(String prefix, Set<String> keys, int index){
int count =0;
for(String key : keys){
if(index == count) return key;
count++;
}
return null;
}
public DBObject removeDots2(DBObject bo, boolean reporting){
if(reporting) System.out.println("removeDots");
//first, check to see if the top level directory has any dots
for(String key : bo.keySet()){
//first, check all the leafs (one dir down only)!
Object o = bo.get(key);
if(o instanceof BasicDBObject){
BasicDBObject dboo = (BasicDBObject) o;
                //copy the key set first to work around the concurrent modification problem...
                List<String> keys = new ArrayList<String>();
for(String zkey : dboo.keySet()){
keys.add(zkey);
}
for(String k2 : keys){
if(k2.contains(".")){
fixDBObjectDotKey(k2,dboo);
}
}
}
}
        //now fix and check the branches (can't do it in the same loop because of the concurrency issue above)
for(String key : bo.keySet()){
//then check the branch
if(key.contains(".")){
fixDBObjectDotKey(key, bo);
}
}
if(null != bo.get(".")){
bo.removeField(".");
}
if(null != bo.get("INFO")){
Object o = bo.get("INFO");
if(o instanceof DBObject){
if( ((DBObject)o).get(".") != null ){
((DBObject)o).removeField(".");
if( ((DBObject)o).keySet().size() == 0 ){
bo.put("INFO", ".");
}
}
}
}
if(reporting){
System.out.println("removedDotsResult");
System.out.println(bo.toString());
System.out.println("end");
}
return bo;
}
public DBObject fixDBObjectDotKey(String key, DBObject dbo){
String newkey = key.replaceAll("\\.", "_");
Object o = dbo.get(key);
if(o instanceof String){
String value = (String) o;
dbo.removeField(key);
dbo.put(newkey, value);
return dbo;
}
if(o instanceof BasicDBObject){
BasicDBObject value = (BasicDBObject) o;
dbo.removeField(key);
dbo.put(newkey, value);
return dbo;
}
if(o instanceof Integer){
Integer value = (Integer) o;
dbo.removeField(key);
dbo.put(newkey, value);
return dbo;
}
if(o instanceof Double){
Double value = (Double) o;
dbo.removeField(key);
dbo.put(newkey, value);
return dbo;
}
if(o instanceof Boolean){
Boolean value = (Boolean) o;
dbo.removeField(key);
dbo.put(newkey,value);
return dbo;
}
if(o instanceof BasicDBList){
BasicDBList value = (BasicDBList) o;
dbo.removeField(key);
dbo.put(newkey,value);
return dbo;
}
return dbo;
}
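// Note on fixDBObjectDotKey: only String, BasicDBObject, Integer, Double,
// Boolean and BasicDBList values are re-keyed above; a value of any other type
// (e.g. a Long) falls through to the final return and keeps its dotted key.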
public void index(String workspace, VCF2VariantPipe vcf, boolean reporting){
JsonObject json = vcf.getJSONMetadata();
if(json == null){
return;
}
DB db = MongoConnection.getDB();
DBCollection col = db.getCollection(workspace);
//auto-index all reserved columns (as needed by the app)
indexReserved(col, reporting);
//auto-index all SNPEFF columns
indexSNPEFF(col, json, reporting);
//index format
indexFormat(vcf.getFormatKeys(), col, reporting);
}
/**
* index the reserved fields that we already know about...
* @param col
* @param reporting
*/
private void indexReserved(DBCollection col, boolean reporting){
indexField("FORMAT.GenotypePostitiveCount", col, reporting);
indexField("FORMAT.GenotypePositiveList",col, reporting); //don't need to index this as soon as the query is refactored
indexField("FORMAT.HeterozygousList",col, reporting);
indexField("FORMAT.HomozygousList",col, reporting);
}
private void indexSNPEFF(DBCollection col, JsonObject json, boolean reporting){
//indexField("INFO.SNPEFF_GENE_NAME", col);
for(String key: getSNPEFFColsFromJsonObj(json)){
indexField("INFO." + key, col, reporting);
}
}
/**
*
* @param json - the json metadata object
* @return the names of all INFO fields whose key contains "SNPEFF"
*/
public List<String> getSNPEFFColsFromJsonObj(JsonObject json){
ArrayList<String> a = new ArrayList<String>();
JsonObject header = json.getAsJsonObject("HEADER");
if(header == null) return a;
JsonObject info = header.getAsJsonObject("INFO");
if(info == null) return a;
Iterator<Map.Entry<String, JsonElement>> iter = info.entrySet().iterator();
while(iter.hasNext()){
Map.Entry<String, JsonElement> next = iter.next();
String key = next.getKey();
//System.out.println(key); //all keys in info
if(key.contains("SNPEFF")){
a.add(key);
}
}
return a;
}
/**
*
* @param formatFields - a list of fields to index (all format fields)
* @param col - the collection/workspace
* @param reporting - if we want to show what is going on in the tomcat log
*/
private void indexFormat(Set<String> formatFields, DBCollection col, boolean reporting){
for(String field : formatFields){
// builds "FORMAT.min.<field>" and "FORMAT.max.<field>" index keys
String ikey = "FORMAT." + "min." + field;
String xkey = "FORMAT." + "max." + field;
if(reporting){ System.out.println( "Trying to index: " + ikey); }
indexField(ikey, col, reporting);
if(reporting){ System.out.println( "Trying to index: " + xkey); }
indexField(xkey, col, reporting);
}
}
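// Example of the keys generated above: a FORMAT field "DP" produces two index
// keys, "FORMAT.min.DP" and "FORMAT.max.DP", matching the min/max structure
// that fixFormat() builds for the metadata.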
private void indexFieldReplacingDot(String field, DBCollection col, boolean reporting){ //don't think this is ever used...
if(field.contains(".")){
field = field.replace(".","_");
}
indexField(field, col, reporting);
}
private Index indexUtil = new Index(); //use the indexUtil instead of the raw interface to prevent duplicate indexes!
private void indexField(String field, DBCollection col, boolean reporting){
if(reporting) System.out.println("index: " + field);
DBObject status = indexUtil.indexField(field,col);
if(reporting) System.out.println(status.toString());
}
public void updateMetadata(String workspace, String jsonUpdate, boolean reporting){
if(reporting) System.out.println("Saving Metadata to Workspace: " + workspace);
DB db = MongoConnection.getDB();
DBCollection col = db.getCollection(Tokens.METADATA_COLLECTION);
String query = "{\"key\":\""+workspace+"\"}";
BasicDBObject bo = (BasicDBObject) JSON.parse(query); //JSON2BasicDBObject
DBCursor dbc = col.find(bo);
DBObject result = null;
while(dbc.hasNext()){
result = dbc.next();
break; //there will only be one that matches!
}
String owner = result.get(Tokens.OWNER).toString();
String id = result.get("_id").toString();
String alias = result.get(Tokens.WORKSPACE_ALIAS).toString();
//key = workspace, passed in so we have that!
col.remove(bo);
//System.out.println("result: " + result.toString());
//note, we want to destroy whatever was in there with the new data (in case they try to load multiple times)
//but we have to keep owner, id, and key.
BasicDBObject replace = (BasicDBObject) JSON.parse(jsonUpdate);
//we need to remove any dot keys before we save the metadata to mongo.
DBObject replaceWODots = removeDots(replace, reporting);
// carry forward ALL existing keys/value pairs (owner, key, _id, alias, ready, status)
replaceWODots.putAll(result);
//now add the new keys
replaceWODots.put("timestamp", getISONow()); //The last time the workspace was touched.
if(reporting) System.out.println(replaceWODots.toString());
col.save(replaceWODots);
}
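// Merge semantics sketch (hypothetical documents): if the stored metadata is
// { owner:"steve", key:"w123" } and jsonUpdate is { HEADER:{...}, owner:"x" },
// the saved document still has owner:"steve", because putAll(result) runs last
// and pre-existing keys win any collision; HEADER and a fresh timestamp are added.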
/**
* In the metadata returned, the format field from the vcf-variant pipe looks something like this:
*
* "FORMAT": {
* "PL": 1,
* "AD": 1,
* "GT": 1,
* "GQ": 1,
* "DP": 1,
* "MLPSAF": 1,
* "MLPSAC": 1
* }
*
* This method transforms it into this (which is what mongodb will store):
* "FORMAT": {
* "min": {
* "PL": 1,
* "AD": 1,
* "GT": 1,
* "GQ": 1,
* "DP": 1,
* "MLPSAF": 1,
* "MLPSAC": 1
* },
* "max": {
* "PL": 1,
* "AD": 1,
* "GT": 1,
* "GQ": 1,
* "DP": 1,
* "MLPSAF": 1,
* "MLPSAC": 1
* }
* }
*
* @param input
* @return
*/
public DBObject fixFormat(DBObject input){
DBObject output = input;
DBObject oldformat = (DBObject) input.removeField("FORMAT");
if(oldformat != null){
DBObject min = new BasicDBObject();
DBObject max = new BasicDBObject();
for(String s : oldformat.keySet()){
min.put(s,1);
max.put(s,1);
}
DBObject format = new BasicDBObject();
if(min.keySet().size() > 0 && max.keySet().size()>0){
format.put("min", min);
format.put("max", max);
}
output.put("FORMAT", format);
}
return output;
}
/**
*
* @return the current time in iso format
*/
public String getISONow(){
TimeZone tz = TimeZone.getTimeZone("UTC");
DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mmZ");
df.setTimeZone(tz);
String nowAsISO = df.format(new Date());
return nowAsISO;
}
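// Sample output for the pattern above (always UTC): "2013-06-05T14:30+0000"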
public HashMap<Integer, String> getTestingCollection() {
return testingCollection;
}
public void setTestingCollection(HashMap<Integer, String> testingCollection) {
this.testingCollection = testingCollection;
}
public JsonObject getJson() {
return json;
}
public void setJson(JsonObject json) {
this.json = json;
}
public Mongo getM() {
return m;
}
public void setM(Mongo m) {
this.m = m;
}
public int getCacheSize() {
return cacheSize;
}
public void setCacheSize(int cacheSize) {
this.cacheSize = cacheSize;
}
public Task getContext() {
return context;
}
public void setContext(Task context) {
this.context = context;
}
public boolean isTesting() {
return testing;
}
public void setTesting(boolean testing) {
this.testing = testing;
}
@Override
public void setTypeAhead(TypeAheadInterface typeAheadInterface) {
this.typeAhead = typeAheadInterface;//To change body of implemented methods use File | Settings | File Templates.
}
public TypeAheadInterface getTypeAhead(){
return typeAhead;
}
public boolean isReporting() {
return reporting;
}
public void setReporting(boolean reporting) {
this.reporting = reporting;
}
public void setMetadata(DBObject metadata) {
this.metadata = metadata;
}
public Index getIndexUtil() {
return indexUtil;
}
public void setIndexUtil(Index indexUtil) {
this.indexUtil = indexUtil;
}
public Double getInitialLinePerformance() {
return initialLinePerformance;
}
public void setInitialLinePerformance(Double initialLinePerformance) {
this.initialLinePerformance = initialLinePerformance;
}
public Double getAverageLinePerformance() {
return averageLinePerformance;
}
public void setAverageLinePerformance(Double averageLinePerformance) {
this.averageLinePerformance = averageLinePerformance;
}
}
| src/main/java/edu/mayo/ve/VCFParser/VCFParser.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package edu.mayo.ve.VCFParser;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.mongodb.*;
import com.mongodb.util.JSON;
import java.io.File;
import java.io.FileReader;
import java.io.LineNumberReader;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.*;
import com.tinkerpop.pipes.Pipe;
import com.tinkerpop.pipes.transform.IdentityPipe;
import com.tinkerpop.pipes.util.Pipeline;
import edu.mayo.TypeAhead.TypeAheadCollection;
import edu.mayo.TypeAhead.TypeAheadInterface;
import edu.mayo.concurrency.exceptions.ProcessTerminatedException;
import edu.mayo.concurrency.workerQueue.Task;
import edu.mayo.parsers.ParserInterface;
import edu.mayo.pipes.MergePipe;
import edu.mayo.pipes.PrintPipe;
import edu.mayo.pipes.UNIX.CatPipe;
import edu.mayo.pipes.bioinformatics.SampleDefinition;
import edu.mayo.pipes.bioinformatics.VCF2VariantPipe;
import edu.mayo.pipes.bioinformatics.VCFHeaderParser;
import edu.mayo.pipes.history.HistoryInPipe;
import edu.mayo.index.Index;
import edu.mayo.pipes.iterators.Compressor;
import edu.mayo.senders.FileSender;
import edu.mayo.senders.Sender;
import edu.mayo.util.Tokens;
import edu.mayo.ve.resources.MetaData;
import edu.mayo.util.SystemProperties;
import java.io.IOException;
import edu.mayo.pipes.bioinformatics.SampleDefinition;
//import edu.mayo.cli.CommandPlugin; TO DO! get this to work :(
import edu.mayo.pipes.ReplaceAllPipe;
import edu.mayo.util.MongoConnection;
import edu.mayo.ve.resources.SampleMeta;
import java.util.Date;
/**
*
* @author m102417
*/
public class VCFParser implements ParserInterface {
private int cacheSize = 50000;
private Mongo m = MongoConnection.getMongo();
/** testingCollection contains all of the objects placed into the workspace from parsing the VCF */
HashMap<Integer,String> testingCollection = new HashMap<Integer,String>();
JsonObject json = null;
private boolean saveSamples = false;
/** @param context - the execution context (so we can kill the process if needed) -- can be null */
private Task context = null;
/** @param typeAhead - the implementation for where value sets will be stored for providing type-ahead functionality. */
private TypeAheadInterface typeAhead = new TypeAheadCollection();
/** @param testing -- populate the in-memory testingCollection instead of Mongo. */
private boolean testing = false;
/** @param reporting - if verbose output is desired (much slower and not for production use, use when debugging) */
private boolean reporting = false;
private Double initialLinePerformance = 0.0;
private Double averageLinePerformance = 0.0;
public static void usage(){
System.out.println("This program will parse a VCF file, obtain the 'schema' for that VCF and populate a MongoDB database with the variants in the VCF.");
System.out.println("");
System.out.println("Make sure to check your sys.properties file fo the MongoDB IP/Port combination, otherwised this script may fail");
System.out.println("usage: VCFParser <input.vcf> <workspace_id>");
}
public static void main(String[] args) throws IOException, ProcessTerminatedException {
SystemProperties sysprops = new SystemProperties();
String mongoPort = sysprops.get("mongo_port");
VCFParser parser = new VCFParser();
if( args.length != 2 ) {
usage();
System.exit(1);
}
String infile = args[0];
String workspace = args[1];
// String infile = "/data/VCFExamples/BATCH4.vcf";
// String outfile = "/data/VCFExamples/BATCH4.json";
// String workspace = "w7ee29742ff80d61953d5e6f84e1686957fbe36f7";
System.out.println("#Input File: " + infile);
System.out.println("#Workspace: " + workspace);
System.out.println("#mongo_server: " + sysprops.get("mongo_server") );
System.out.println("#mongo port: " + new Integer(sysprops.get("mongo_port")));
parser.setReporting(true);
int datalines = parser.parse(null, infile, workspace, 50000, false, true, true);
parser.checkAndUpdateLoadStatus(workspace, datalines, true);
parser.m.close();
//note the following will only work if you have a document in mongo like:
//{ "_id" : { "$oid" : "51afa2710364d3ebd97b533a"} , "owner" : "steve" , "alias" : "foo" , "key" : "w7ee29742ff80d61953d5e6f84e1686957fbe36f7"}
//parser.updateMetadata(workspace, "{ \"hosting\" : \"hostA\" , \"clients\" : \"888\" , \"type\" : \"vps\"}");
}
/**
*
* @return The path to an error file that will show all errors with the load
*/
public String getErrorFile(String workspace){
try {
SystemProperties sysprops = new SystemProperties();
String tmp = sysprops.get("TEMPDIR");
return tmp + File.separator + workspace + ".errors";
}catch (Exception e){
throw new RuntimeException(e);
}
}
/** legacy interface, keep it in place for testing */
public int parse(Task context, String infile, String workspace, int typeAheadCacheSize) throws ProcessTerminatedException {
return parse(context, infile, workspace, typeAheadCacheSize, false, false, true);
}
/** legacy interface, keep it in place for testing */
public int parse(Task context, String infile, String workspace, int typeAheadCacheSize, boolean testing) throws ProcessTerminatedException{
return parse(context, infile, workspace, typeAheadCacheSize, testing, false, true);
}
/** legacy interface, keep it in place for testing */
public int parse(Task context, String infile, String workspace, int typeAheadCacheSize, boolean testing, boolean reporting, boolean saveSamples) throws ProcessTerminatedException {
typeAhead = new TypeAheadCollection();
this.saveSamples = saveSamples;
this.reporting = reporting;
this.testing = testing;
return parse(infile, workspace);
}
/**
* This method makes it easier to test the VCF parsing logic by letting test
* methods obtain the parsing pipeline directly.
* @return
*/
public Pipe getPipeline(VCF2VariantPipe vcf, String infile){
Pipe p = new Pipeline(new CatPipe(),
new ReplaceAllPipe("\\{",""),
new ReplaceAllPipe("\\}",""),
new HistoryInPipe(),
vcf,
new MergePipe("\t"),
new ReplaceAllPipe("^.*\t\\{", "{"),
new ReplaceAllPipe("\"_id\":","\"_ident\":"),
new ReplaceAllPipe("Infinity","2147483648"),
//new PrintPipe(),
new IdentityPipe()
);
p.setStarts(Arrays.asList(infile));
return p;
}
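// Rough sketch of one record's path through the pipeline above, inferred only
// from the regexes visible here: raw VCF line -> literal braces stripped ->
// VCF2VariantPipe emits JSON -> history merged on tabs -> everything before the
// final tab-brace dropped -> "_id" renamed to "_ident" (presumably so Mongo can
// assign its own _id) -> the literal "Infinity" replaced with 2147483648.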
/**
* This is the simple direct interface that just works when we need a simple parser.
* parse the infile, which is a raw vcf and put it into the mongo workspace
* @param infile - the complete (canonical) path to the raw VCF file we want to parse, as a string.
* @param workspace - the key for the workspace where we will put the data
* @return lines processed.
*/
public int parse(String infile, String workspace) throws ProcessTerminatedException{
Sender sender = new FileSender(getErrorFile(workspace));
if(reporting){ System.out.println("Getting the vcf-miner database from mongo"); }
DB db = MongoConnection.getDB();
if(reporting){ System.out.println("Getting the workspace collection from mongo"); }
DBCollection col = db.getCollection(workspace);
if(reporting){
System.out.println("Setting up Pipeline,\n input file: " + infile);
System.out.println("Workspace: " + workspace);
System.out.println("TypeAhead: " + typeAhead);
System.out.println("Testing: " + testing);
System.out.println("Reporting: " + reporting);
}
//make sure type-ahead is indexed before we go ahead and do the load:
typeAhead.index(workspace, reporting);
VCF2VariantPipe vcf = new VCF2VariantPipe(sender, true, false);
Pipe p = getPipeline(vcf, infile);
int i;
long starttime = System.currentTimeMillis();
DBObject jsonmeta = null;
boolean storedVariantCount = false;
if(reporting) System.out.println("Processing Data....");
try {
for(i=0; p.hasNext(); i++){
if(context!=null){ if(context.isTerminated()) throw new ProcessTerminatedException(); }
if (!storedVariantCount) {
try {
storeVariantCount(new File(infile), vcf.getHeaderLineCount(), workspace);
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
storedVariantCount = true;
}
// if the line # is evenly divisible by 256 (a power of 2)
if (fastModulus(i, 256) == 0) {
storeCurrentVariantCount(i, workspace);
}
if(reporting) System.out.println(col.count());
String s = (String) p.next();
//System.out.println(s);
BasicDBObject bo = (BasicDBObject) JSON.parse(s);
if(saveSamples == false){
bo = removeSamples(bo);
}
//for type-ahead, we need access to the metadata inside the loop, try to force that here
if(jsonmeta == null){
//System.out.println(vcf.getJSONMetadata().toString());
jsonmeta = (DBObject) JSON.parse(vcf.getJSONMetadata().toString());
jsonmeta = removeDots(jsonmeta, reporting);
}
if(reporting){
System.out.println("row before removing dots:"); System.out.println(s); }
if(testing){
testingCollection.put(new Integer(i), s);
}else {//production
//System.out.println(bo.toString());
col.save(removeDots(bo, reporting));
addToTypeAhead(bo, workspace,jsonmeta);
}
if(reporting){
System.out.println("i:" + i + "\ts:" + s.length());
}
long curtime = System.currentTimeMillis();
averageLinePerformance = 1.0*(curtime-starttime)/(i+1);
if(i<50){
//consider burn in, this is the initial reading(s)...
initialLinePerformance = averageLinePerformance;
}
}
// final time, update current count
storeCurrentVariantCount(i, workspace);
} finally {
// close the FileSender so that all messages are flushed to disk
sender.close();
}
try {
addPoundSamples(vcf.getSampleDefinitions(), workspace);
}catch (IOException e){
//this exception happens when the configuration file (sys.properties) is not set up correctly.
throw new ProcessTerminatedException();
}
json = vcf.getJSONMetadata();
//do some small fixes to the metadata before continuing...
//if(reporting){System.out.println("Updating metadata with type-ahead informaton");}
//DEPRICATED!!!
//json = updateMetadataWTypeAhead(json, typeAhead.getOverunFields());
if(reporting){System.out.println("Changing the structure of the FORMAT metadata");}
metadata = this.fixFormat((DBObject)JSON.parse(json.toString()));
if(!testing){
if(reporting){System.out.println("Updating metadata in database...");}
updateMetadata(workspace, metadata.toString(), reporting);
if(reporting){System.out.println("indexing...");}
index(workspace, vcf, reporting);
if(reporting){System.out.println("saving type-ahead results to the database");}
typeAhead.index(workspace, reporting);
}
if(reporting){ System.out.println("done!");}
return i; //the number of records processed
}
/**
* Stores the TOTAL variant count in the 'meta' collection
* @param vcf
* @param headerLineCount
* @param workspaceID
* @throws IOException
*/
private void storeVariantCount(File vcf, int headerLineCount, String workspaceID) throws IOException {
long timestamp = System.currentTimeMillis();
int variantCount = getLineCount(vcf) - headerLineCount;
long delta = System.currentTimeMillis() - timestamp;
System.out.println("Took " + delta + "ms to get line count for file " + vcf.getAbsolutePath());
// store to mongo meta collection
BasicDBObject query = new BasicDBObject().append(Tokens.KEY, workspaceID);
BasicDBObject update = new BasicDBObject();
update.append("$set", new BasicDBObject().append("variant_count_total", variantCount));
MongoConnection.getDB().getCollection(Tokens.METADATA_COLLECTION).update(query, update);
}
/**
* Stores the CURRENT variant count in the 'meta' collection.
* @param currentLineNum
* @param workspaceID
*/
private void storeCurrentVariantCount(int currentLineNum, String workspaceID){
// store to mongo meta collection
BasicDBObject query = new BasicDBObject().append(Tokens.KEY, workspaceID);
BasicDBObject update = new BasicDBObject();
update.append("$set", new BasicDBObject().append("variant_count_current", currentLineNum));
MongoConnection.getDB().getCollection(Tokens.METADATA_COLLECTION).update(query, update);
}
/**
* Faster alternative to the Java "%" modulus operator, which internally uses
* slow division. NOTE: this only works if the divisor is a power of 2 (e.g. 2, 4, 8, 16, etc...)
*
* @param dividend
* @param divisor
* @return
*/
private int fastModulus(int dividend, int divisor) {
return dividend & (divisor - 1);
}
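// e.g. fastModulus(300, 256) == 44 == 300 % 256, but fastModulus(10, 6) == 0
// while 10 % 6 == 4: the bit trick only holds for power-of-two divisors.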
/**
* Gets the total line count for the given file.
* @param f
* The file to be inspected.
* @return
* The line count for the given file.
* @throws IOException
*/
private int getLineCount(File f) throws IOException {
// use compressor to figure out how to handle .zip, .gz, .bz2, etc...
File fakeOutFile = File.createTempFile("fake", "fake");
Compressor compressor = new Compressor(f, fakeOutFile);
LineNumberReader reader = null;
try {
reader = new LineNumberReader(compressor.getReader());
reader.skip(Long.MAX_VALUE);
return reader.getLineNumber();
} finally {
reader.close();
fakeOutFile.delete();
}
}
/**
* Checks whether the given INFO field has a metadata type of String or Character.
*
* @param infoField
* The INFO field to be checked.
* @param metadata
* VCF metadata.
*/
private boolean isStringOrCharacter(String infoField, DBObject metadata) {
Map headerMap = (Map) metadata.get("HEADER");
Map infoMap = (Map) headerMap.get("INFO");
if (infoMap.containsKey(infoField)) {
Map fieldMetadata = (Map) infoMap.get(infoField);
String type = (String) fieldMetadata.get("type");
if(type.equals("String") || type.equals("Character")) {
return true;
} else {
return false;
}
} else {
// no matching ##INFO metadata for this field
// it's possible the ##INFO is missing completely or not well-formed
return false;
}
}
/**
*
* @param vcfLine
* @param workspace
* @param metadata - what the header says about the field (e.g. is it a number, if so, then don't do a type-ahead!)
*/
private void addToTypeAhead(DBObject vcfLine, String workspace, DBObject metadata){
DBObject info = (DBObject) vcfLine.get("INFO"); //only care about info field for type-ahead
if(info != null){
for(String key : info.keySet()) {
String ikey = "INFO." + key;
if(isStringOrCharacter(key,metadata)) { //check to see if it's a String or Character
Object o = info.get(key);
if (o instanceof BasicDBList) {
//it is a list of String or Character
addArrToTypeAhead((BasicDBList) o, workspace, ikey);
} else {
// it is a single String or Character value
typeAhead.put(workspace, ikey, String.valueOf(o));
}
}
}
}
}
/**
* ##SAMPLE lines need to be added to their own collection so that later code can query them.
*/
public void addPoundSamples(Iterator<SampleDefinition> iter, String workspace) throws IOException {
SampleMeta sm = new SampleMeta();
SystemProperties sysprop = new SystemProperties();
String poundsamplecol = sysprop.get(SampleMeta.sample_meta_collection);
DB db = MongoConnection.getDB();
DBCollection col = db.getCollection(poundsamplecol);
Gson gson = new Gson();
//First, get a cursor to the ##SAMPLE objects.
while(iter.hasNext()){
SampleDefinition sdef = (SampleDefinition) iter.next();
String json = gson.toJson(sdef);
BasicDBObject bo = (BasicDBObject) JSON.parse(json);
bo.append(SampleMeta.sample_meta_key, workspace);
System.out.println(json);
col.save(bo);
}
sm.indexSampleDocuments();
}
/**
*
* @param values - a list of strings, doubles or ints.
* @param workspace - the workspace key
* @param ikey - the prefixed field name (e.g. "INFO.xyz") under which the values are stored
*/
private void addArrToTypeAhead(BasicDBList values, String workspace, String ikey){
//System.out.println("Adding Array: " + ikey);
for(Object o : values){
if (o instanceof String) {
typeAhead.put(workspace, ikey, o.toString());
//System.out.println(ikey + ((String) o));
} else if (o instanceof Integer) {
typeAhead.put(workspace, ikey, String.valueOf((Integer) o));
} else if (o instanceof Double) {
typeAhead.put(workspace, ikey, String.valueOf((Double) o));
}
}
}
/**
* Adds the {@link SampleDefinition} values of type string to the typeahead backend.
* @param def
* The {@link SampleDefinition} to be used.
* @param workspace
* The workspace key.
*/
public void addToTypeAhead(SampleDefinition def, String workspace) {
for (String stringMetaKey: def.getStringKeys()) {
final String ikey = "META." + stringMetaKey;
for (String value: def.getStringValues(stringMetaKey)) {
typeAhead.put(workspace, ikey, value);
}
}
}
private DBObject metadata = null;
/**
* for testing...
*/
public DBObject getMetadata(){
return metadata;
}
public BasicDBObject removeSamples(BasicDBObject o){
o.removeField("samples");
return o;
}
public int parse(Task task, String inputVCFFile, String workspace, TypeAheadInterface typeAhead, boolean testing) throws ProcessTerminatedException {
if(task != null)
context = task;
if(typeAhead != null){
this.typeAhead = typeAhead;
}
this.testing = testing;
// note: this overload has no 'reporting' parameter; the field keeps its current value
return parse(inputVCFFile, workspace);
}
/**
* This needs to be called after a parse to ensure that the load gets correctly registered by the UI
* checks to see if the load worked or if it failed
* @param workspace - the key for the workspace
* @param datalines - the number of lines of data in the load file
* @param force - force the status as successful
* @return true if it worked, false if it failed.
*/
public boolean checkAndUpdateLoadStatus(String workspace, int datalines, boolean force){
MetaData metaData = new MetaData();
if(force){
if(reporting) System.out.println("Force flag set, the workspace will be flagged as ready!");
metaData.flagAsReady(workspace);
}
if(reporting) System.out.println("Checking the status of the load");
//the first way the load could have failed is if the number of records in the workspace != the number of data lines
BasicDBObject query = new BasicDBObject(); //empty for all
DB db = MongoConnection.getDB();
DBCollection col = db.getCollection(workspace);
long linesLoaded = col.count(query);
if(linesLoaded != datalines){
metaData.flagAsFailed(workspace, "The load failed, the number of records in the workspace (" + linesLoaded + ") does not equal the number of data lines in the original file (" + datalines +")" );
return false;
}
//are there other ways we could check / need to check that a load failed?
//everything looks ok,
//now flag the workspace as ready so the UI knows
if(reporting) System.out.println("Flagging the workspace as ready");
//requested change by patrick
//metaData.flagAsReady(workspace);
return true;
}
private class TrackNode {
public TrackNode(DBObject node, boolean discovered){
this.node = node;
this.discovered = discovered;
}
public DBObject node; // the position in the JSON structure where the dfs is pointing
public DBObject shadow; // shadow is the new node in the copy structure (currently unused)
public boolean discovered = false;
}
/**
* removeDots uses an iterative depth-first search to traverse the JSON object hierarchy and replace any key containing a dot (.)
* with one using an underscore (_) instead, since MongoDB cannot store keys that contain dots.
* @param bo - the object whose keys (at any depth) may contain dots
* @param reporting - if true, print the transformed object for debugging
* @return the same DBObject instance, with dotted keys renamed
*/
public DBObject removeDots(DBObject bo, boolean reporting){
if(bo == null) return bo;
if(bo.keySet().size() < 1) return bo;
//Generic non-recursive DFS algorithm is:
// 1 procedure DFS-iterative(G,v):
// 2 let S be a stack
// 3 S.push(v)
// 4 while S is not empty
// 5 v ← S.pop()
// 6 if v is not labeled as discovered:
// 7 label v as discovered
// 8 for all edges from v to w in G.adjacentEdges(v) do
// 9 S.push(w)
Stack<TrackNode> s = new Stack<TrackNode>();
TrackNode v = new TrackNode(bo, false);
s.push(v);
while( s.size() > 0){
v = s.pop();
if(v.discovered == false){
v.discovered = true;
ArrayList<String> keyset = new ArrayList<String>();
for(String key : v.node.keySet()){
//System.out.println(key);
keyset.add(key);
Object o = v.node.get(key);
if(o instanceof DBObject){
DBObject d = (DBObject) o;
s.push(new TrackNode(d,false));
}//else it is data
}
for(String key : keyset){
if(key.contains(".")){
v.node = fixDBObjectDotKey(key, v.node);
}
}
}
}
if(reporting){
System.out.println("removedDotsResult");
System.out.println(bo.toString());
System.out.println("end");
}
return bo;
}
/** returns the index-th key from the set, in its iteration order; note that 'prefix' is currently unused */
public String getNth(String prefix, Set<String> keys, int index){
int count =0;
for(String key : keys){
if(index == count) return key;
count++;
}
return null;
}
public DBObject removeDots2(DBObject bo, boolean reporting){
if(reporting) System.out.println("removeDots");
//first, check to see if the top level directory has any dots
for(String key : bo.keySet()){
//first, check all the leafs (one dir down only)!
Object o = bo.get(key);
if(o instanceof BasicDBObject){
BasicDBObject dboo = (BasicDBObject) o;
//deal with the concurrent modification problem by copying the keys first...
List<String> keys = new ArrayList<String>();
for(String zkey : dboo.keySet()){
keys.add(zkey);
}
for(String k2 : keys){
if(k2.contains(".")){
fixDBObjectDotKey(k2,dboo);
}
}
}
}
//now fix and check the branches (can't do it in the same loop because of the concurrency issue above)
for(String key : bo.keySet()){
//then check the branch
if(key.contains(".")){
fixDBObjectDotKey(key, bo);
}
}
if(null != bo.get(".")){
bo.removeField(".");
}
if(null != bo.get("INFO")){
Object o = bo.get("INFO");
if(o instanceof DBObject){
if( ((DBObject)o).get(".") != null ){
((DBObject)o).removeField(".");
if( ((DBObject)o).keySet().size() == 0 ){
bo.put("INFO", ".");
}
}
}
}
if(reporting){
System.out.println("removedDotsResult");
System.out.println(bo.toString());
System.out.println("end");
}
return bo;
}
public DBObject fixDBObjectDotKey(String key, DBObject dbo){
String newkey = key.replaceAll("\\.", "_");
Object o = dbo.get(key);
if(o instanceof String){
String value = (String) o;
dbo.removeField(key);
dbo.put(newkey, value);
return dbo;
}
if(o instanceof BasicDBObject){
BasicDBObject value = (BasicDBObject) o;
dbo.removeField(key);
dbo.put(newkey, value);
return dbo;
}
if(o instanceof Integer){
Integer value = (Integer) o;
dbo.removeField(key);
dbo.put(newkey, value);
return dbo;
}
if(o instanceof Double){
Double value = (Double) o;
dbo.removeField(key);
dbo.put(newkey, value);
return dbo;
}
if(o instanceof Boolean){
Boolean value = (Boolean) o;
dbo.removeField(key);
dbo.put(newkey,value);
return dbo;
}
if(o instanceof BasicDBList){
BasicDBList value = (BasicDBList) o;
dbo.removeField(key);
dbo.put(newkey,value);
return dbo;
}
return dbo;
}
public void index(String workspace, VCF2VariantPipe vcf, boolean reporting){
JsonObject json = vcf.getJSONMetadata();
if(json == null){
return;
}
DB db = MongoConnection.getDB();
DBCollection col = db.getCollection(workspace);
//auto-index all reserved columns (as needed by the app)
indexReserved(col, reporting);
//auto-index all SNPEFF columns
indexSNPEFF(col, json, reporting);
//index format
indexFormat(vcf.getFormatKeys(), col, reporting);
}
/**
* index the reserved fields that we already know about...
* @param col
* @param reporting
*/
private void indexReserved(DBCollection col, boolean reporting){
indexField("FORMAT.GenotypePostitiveCount", col, reporting);
indexField("FORMAT.GenotypePositiveList",col, reporting); //don't need to index this as soon as the query is refactored
indexField("FORMAT.HeterozygousList",col, reporting);
indexField("FORMAT.HomozygousList",col, reporting);
}
private void indexSNPEFF(DBCollection col, JsonObject json, boolean reporting){
//indexField("INFO.SNPEFF_GENE_NAME", col);
for(String key: getSNPEFFColsFromJsonObj(json)){
indexField("INFO." + key, col, reporting);
}
}
/**
*
* @param json - the json metadata object
* @return the names of all INFO fields whose key contains "SNPEFF"
*/
public List<String> getSNPEFFColsFromJsonObj(JsonObject json){
ArrayList<String> a = new ArrayList<String>();
JsonObject header = json.getAsJsonObject("HEADER");
if(header == null) return a;
JsonObject info = header.getAsJsonObject("INFO");
if(info == null) return a;
Iterator<Map.Entry<String, JsonElement>> iter = info.entrySet().iterator();
while(iter.hasNext()){
Map.Entry<String, JsonElement> next = iter.next();
String key = next.getKey();
//System.out.println(key); //all keys in info
if(key.contains("SNPEFF")){
a.add(key);
}
}
return a;
}
/**
*
* @param formatFields - a list of fields to index (all format fields)
* @param col - the collection/workspace
* @param reporting - if we want to show what is going on in the tomcat log
*/
private void indexFormat(Set<String> formatFields, DBCollection col, boolean reporting){
for(String field : formatFields){
// builds "FORMAT.min.<field>" and "FORMAT.max.<field>" index keys
String ikey = "FORMAT." + "min." + field;
String xkey = "FORMAT." + "max." + field;
if(reporting){ System.out.println( "Trying to index: " + ikey); }
indexField(ikey, col, reporting);
if(reporting){ System.out.println( "Trying to index: " + xkey); }
indexField(xkey, col, reporting);
}
}
private void indexFieldReplacingDot(String field, DBCollection col, boolean reporting){ //don't think this is ever used...
if(field.contains(".")){
field = field.replace(".","_");
}
indexField(field, col, reporting);
}
private Index indexUtil = new Index(); //use the indexUtil instead of the raw interface to prevent duplicate indexes!
private void indexField(String field, DBCollection col, boolean reporting){
if(reporting) System.out.println("index: " + field);
DBObject status = indexUtil.indexField(field,col);
if(reporting) System.out.println(status.toString());
}
public void updateMetadata(String workspace, String jsonUpdate, boolean reporting){
if(reporting) System.out.println("Saving Metadata to Workspace: " + workspace);
DB db = MongoConnection.getDB();
DBCollection col = db.getCollection(Tokens.METADATA_COLLECTION);
String query = "{\"key\":\""+workspace+"\"}";
BasicDBObject bo = (BasicDBObject) JSON.parse(query); //JSON2BasicDBObject
DBCursor dbc = col.find(bo);
DBObject result = null;
while(dbc.hasNext()){
result = dbc.next();
break; //there will only be one that matches!
}
String owner = result.get(Tokens.OWNER).toString();
String id = result.get("_id").toString();
String alias = result.get(Tokens.WORKSPACE_ALIAS).toString();
//key = workspace, passed in so we have that!
col.remove(bo);
//System.out.println("result: " + result.toString());
//note, we want to destroy whatever was in there with the new data (in case they try to load multiple times)
//but we have to keep owner, id, and key.
BasicDBObject replace = (BasicDBObject) JSON.parse(jsonUpdate);
//we need to remove any dot keys before we save the metadata to mongo.
DBObject replaceWODots = removeDots(replace, reporting);
// carry forward ALL existing keys/value pairs (owner, key, _id, alias, ready, status)
replaceWODots.putAll(result);
//now add the new keys
replaceWODots.put("timestamp", getISONow()); //The last time the workspace was touched.
if(reporting) System.out.println(replaceWODots.toString());
col.save(replaceWODots);
}
/**
* In the metadata returned, the format field from the vcf-variant pipe looks something like this:
*
* "FORMAT": {
* "PL": 1,
* "AD": 1,
* "GT": 1,
* "GQ": 1,
* "DP": 1,
* "MLPSAF": 1,
* "MLPSAC": 1
* }
*
* This method transforms it into this (which is what mongodb will store):
* "FORMAT": {
* "min": {
* "PL": 1,
* "AD": 1,
* "GT": 1,
* "GQ": 1,
* "DP": 1,
* "MLPSAF": 1,
* "MLPSAC": 1
* },
* "max": {
* "PL": 1,
* "AD": 1,
* "GT": 1,
* "GQ": 1,
* "DP": 1,
* "MLPSAF": 1,
* "MLPSAC": 1
* }
* }
*
* @param input
* @return
*/
public DBObject fixFormat(DBObject input){
DBObject output = input;
DBObject oldformat = (DBObject) input.removeField("FORMAT");
if(oldformat != null){
DBObject min = new BasicDBObject();
DBObject max = new BasicDBObject();
for(String s : oldformat.keySet()){
min.put(s,1);
max.put(s,1);
}
DBObject format = new BasicDBObject();
if(min.keySet().size() > 0 && max.keySet().size()>0){
format.put("min", min);
format.put("max", max);
}
output.put("FORMAT", format);
}
return output;
}
/**
*
* @return the current time in iso format
*/
public String getISONow(){
TimeZone tz = TimeZone.getTimeZone("UTC");
DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mmZ");
df.setTimeZone(tz);
String nowAsISO = df.format(new Date());
return nowAsISO;
}
public HashMap<Integer, String> getTestingCollection() {
return testingCollection;
}
public void setTestingCollection(HashMap<Integer, String> testingCollection) {
this.testingCollection = testingCollection;
}
public JsonObject getJson() {
return json;
}
public void setJson(JsonObject json) {
this.json = json;
}
public Mongo getM() {
return m;
}
public void setM(Mongo m) {
this.m = m;
}
public boolean isSaveSamples() {
return saveSamples;
}
public void setSaveSamples(boolean saveSamples) {
this.saveSamples = saveSamples;
}
public int getCacheSize() {
return cacheSize;
}
public void setCacheSize(int cacheSize) {
this.cacheSize = cacheSize;
}
public Task getContext() {
return context;
}
public void setContext(Task context) {
this.context = context;
}
public boolean isTesting() {
return testing;
}
public void setTesting(boolean testing) {
this.testing = testing;
}
@Override
public void setTypeAhead(TypeAheadInterface typeAheadInterface) {
this.typeAhead = typeAheadInterface;//To change body of implemented methods use File | Settings | File Templates.
}
public TypeAheadInterface getTypeAhead(){
return typeAhead;
}
public boolean isReporting() {
return reporting;
}
public void setReporting(boolean reporting) {
this.reporting = reporting;
}
public void setMetadata(DBObject metadata) {
this.metadata = metadata;
}
public Index getIndexUtil() {
return indexUtil;
}
public void setIndexUtil(Index indexUtil) {
this.indexUtil = indexUtil;
}
public Double getInitialLinePerformance() {
return initialLinePerformance;
}
public void setInitialLinePerformance(Double initialLinePerformance) {
this.initialLinePerformance = initialLinePerformance;
}
public Double getAverageLinePerformance() {
return averageLinePerformance;
}
public void setAverageLinePerformance(Double averageLinePerformance) {
this.averageLinePerformance = averageLinePerformance;
}
}
| Changes to be compatible with 3.0.8-SNAPSHOT of pipes. SAMPLES are no longer kept.
git-svn-id: 3727a5fd490da57ffbb4efc8eef39fd409567e2c@23454 8f329c14-d232-4f17-8684-7ee34322b8dc
| src/main/java/edu/mayo/ve/VCFParser/VCFParser.java | Changes to be compatible with 3.0.8-SNAPSHOT of pipes. SAMPLES are no longer kept. | <ide><path>rc/main/java/edu/mayo/ve/VCFParser/VCFParser.java
<ide> import com.google.gson.JsonObject;
<ide> import com.mongodb.*;
<ide> import com.mongodb.util.JSON;
<add>import com.tinkerpop.pipes.Pipe;
<add>import com.tinkerpop.pipes.transform.IdentityPipe;
<add>import com.tinkerpop.pipes.util.Pipeline;
<add>import edu.mayo.TypeAhead.TypeAheadCollection;
<add>import edu.mayo.TypeAhead.TypeAheadInterface;
<add>import edu.mayo.concurrency.exceptions.ProcessTerminatedException;
<add>import edu.mayo.concurrency.workerQueue.Task;
<add>import edu.mayo.index.Index;
<add>import edu.mayo.parsers.ParserInterface;
<add>import edu.mayo.pipes.MergePipe;
<add>import edu.mayo.pipes.ReplaceAllPipe;
<add>import edu.mayo.pipes.UNIX.CatPipe;
<add>import edu.mayo.pipes.bioinformatics.SampleDefinition;
<add>import edu.mayo.pipes.bioinformatics.VCF2VariantPipe;
<add>import edu.mayo.pipes.history.HistoryInPipe;
<add>import edu.mayo.pipes.iterators.Compressor;
<add>import edu.mayo.senders.FileSender;
<add>import edu.mayo.senders.Sender;
<add>import edu.mayo.util.MongoConnection;
<add>import edu.mayo.util.SystemProperties;
<add>import edu.mayo.util.Tokens;
<add>import edu.mayo.ve.resources.MetaData;
<add>import edu.mayo.ve.resources.SampleMeta;
<ide>
<ide> import java.io.File;
<del>import java.io.FileReader;
<add>import java.io.IOException;
<ide> import java.io.LineNumberReader;
<ide> import java.text.DateFormat;
<ide> import java.text.SimpleDateFormat;
<ide> import java.util.*;
<ide>
<del>import com.tinkerpop.pipes.Pipe;
<del>import com.tinkerpop.pipes.transform.IdentityPipe;
<del>import com.tinkerpop.pipes.util.Pipeline;
<del>
<del>import edu.mayo.TypeAhead.TypeAheadCollection;
<del>import edu.mayo.TypeAhead.TypeAheadInterface;
<del>import edu.mayo.concurrency.exceptions.ProcessTerminatedException;
<del>import edu.mayo.concurrency.workerQueue.Task;
<del>import edu.mayo.parsers.ParserInterface;
<del>import edu.mayo.pipes.MergePipe;
<del>import edu.mayo.pipes.PrintPipe;
<del>import edu.mayo.pipes.UNIX.CatPipe;
<del>import edu.mayo.pipes.bioinformatics.SampleDefinition;
<del>import edu.mayo.pipes.bioinformatics.VCF2VariantPipe;
<del>import edu.mayo.pipes.bioinformatics.VCFHeaderParser;
<del>import edu.mayo.pipes.history.HistoryInPipe;
<del>import edu.mayo.index.Index;
<del>import edu.mayo.pipes.iterators.Compressor;
<del>import edu.mayo.senders.FileSender;
<del>import edu.mayo.senders.Sender;
<del>import edu.mayo.util.Tokens;
<del>import edu.mayo.ve.resources.MetaData;
<del>import edu.mayo.util.SystemProperties;
<del>import java.io.IOException;
<del>import edu.mayo.pipes.bioinformatics.SampleDefinition;
<del>
<ide> //import edu.mayo.cli.CommandPlugin; TO DO! get this to work :(
<del>import edu.mayo.pipes.ReplaceAllPipe;
<del>import edu.mayo.util.MongoConnection;
<del>import edu.mayo.ve.resources.SampleMeta;
<del>
<del>import java.util.Date;
<ide>
<ide>
<ide> /**
<ide> /** testingCollection contains all of the objects placed into the workspace from parsing the VCF */
<ide> HashMap<Integer,String> testingCollection = new HashMap<Integer,String>();
<ide> JsonObject json = null;
<del> private boolean saveSamples = false;
<ide> /** @param context - the execution context (so we can kill the process if needed) -- can be null */
<ide> private Task context = null;
<ide> /** @param typeAhead - the implementation for where value sets will be stored for providing type-ahead functionality. */
<ide> System.out.println("#mongo_server: " + sysprops.get("mongo_server") );
<ide> System.out.println("#mongo port: " + new Integer(sysprops.get("mongo_port")));
<ide> parser.setReporting(true);
<del> int datalines = parser.parse(null, infile, workspace, 50000, false, true, true);
<add> int datalines = parser.parse(null, infile, workspace, 50000, false, true);
<ide> parser.checkAndUpdateLoadStatus(workspace, datalines, true);
<ide> parser.m.close();
<ide> //note the following will only work if you have a document in mongo like:
<ide>
<ide> /** legacy interface, keep it in place for testing */
<ide> public int parse(Task context, String infile, String workspace, int typeAheadCacheSize) throws ProcessTerminatedException {
<del> return parse(context, infile, workspace, typeAheadCacheSize, false, false, true);
<add> return parse(context, infile, workspace, typeAheadCacheSize, false, false);
<ide> }
<ide>
<ide> /** legacy interface, keep it in place for testing */
<ide> public int parse(Task context, String infile, String workspace, int typeAheadCacheSize, boolean testing) throws ProcessTerminatedException{
<del> return parse(context, infile, workspace, typeAheadCacheSize, testing, false, true);
<add> return parse(context, infile, workspace, typeAheadCacheSize, testing, false);
<ide> }
<ide>
<ide> /** legacy interface, keep it in place for testing */
<del> public int parse(Task context, String infile, String workspace, int typeAheadCacheSize, boolean testing, boolean reporting, boolean saveSamples) throws ProcessTerminatedException {
<add> public int parse(Task context, String infile, String workspace, int typeAheadCacheSize, boolean testing, boolean reporting) throws ProcessTerminatedException {
<ide> typeAhead = new TypeAheadCollection();
<del> this.saveSamples = saveSamples;
<ide> this.reporting = reporting;
<ide> this.testing = testing;
<ide> return parse(infile, workspace);
<ide> //System.out.println(s);
<ide> BasicDBObject bo = (BasicDBObject) JSON.parse(s);
<ide>
<del> if(saveSamples == false){
<del> bo = removeSamples(bo);
<del> }
<del>
<ide> //for type-ahead, we need access to the metadata inside the loop, try to force that here
<ide> if(jsonmeta == null){
<ide> //System.out.println(vcf.getJSONMetadata().toString());
<ide> */
<ide> public DBObject getMetadata(){
<ide> return metadata;
<del> }
<del>
<del> public BasicDBObject removeSamples(BasicDBObject o){
<del> o.removeField("samples");
<del> return o;
<ide> }
<ide>
<ide> public int parse(Task task, String inputVCFFile, String workspace, TypeAheadInterface typeAhead, boolean testing) throws ProcessTerminatedException {
<ide> this.m = m;
<ide> }
<ide>
<del> public boolean isSaveSamples() {
<del> return saveSamples;
<del> }
<del>
<del> public void setSaveSamples(boolean saveSamples) {
<del> this.saveSamples = saveSamples;
<del> }
<del>
<ide> public int getCacheSize() {
<ide> return cacheSize;
<ide> } |
|
Java | apache-2.0 | 1d06628560e74f48cff7140a775da382943646b3 | 0 | apache/incubator-shardingsphere,leeyazhou/sharding-jdbc,apache/incubator-shardingsphere,leeyazhou/sharding-jdbc,leeyazhou/sharding-jdbc,apache/incubator-shardingsphere,leeyazhou/sharding-jdbc,apache/incubator-shardingsphere | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.core.executor.sql.execute.result;
import com.google.common.base.Optional;
import lombok.SneakyThrows;
import org.apache.shardingsphere.core.encrypt.ShardingEncryptorEngine;
import org.apache.shardingsphere.core.rule.ShardingRule;
import org.apache.shardingsphere.spi.algorithm.encrypt.ShardingEncryptor;
import org.junit.Before;
import org.junit.Test;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.Collections;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class QueryResultMetaDataTest {
private QueryResultMetaData queryResultMetaData;
@Before
@SneakyThrows
public void setUp() {
ResultSetMetaData resultSetMetaData = getResultMetaData();
ShardingRule shardingRule = getShardingRule();
}
private ShardingRule getShardingRule() {
ShardingEncryptor shardingEncryptor = mock(ShardingEncryptor.class);
ShardingEncryptorEngine shardingEncryptorEngine = mock(ShardingEncryptorEngine.class);
when(shardingEncryptorEngine.getShardingEncryptor(anyString(), anyString())).thenReturn(Optional.of(shardingEncryptor));
ShardingRule result = mock(ShardingRule.class);
when(result.getShardingEncryptorEngine()).thenReturn(shardingEncryptorEngine);
when(result.getLogicTableNames(anyString())).thenReturn(Collections.<String>emptyList());
return result;
}
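// The stubbing above makes the engine return an encryptor for any
// (table, column) pair, so every column looks encrypted to the metadata under
// test, while getLogicTableNames() returning an empty list keeps table
// resolution out of the picture.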
private ResultSetMetaData getResultMetaData() throws SQLException {
ResultSetMetaData result = mock(ResultSetMetaData.class);
when(result.getColumnCount()).thenReturn(1);
when(result.getColumnName(anyInt())).thenReturn("column");
when(result.getColumnLabel(anyInt())).thenReturn("label");
when(result.getTableName(anyInt())).thenReturn("table");
return result;
}
@Test
public void testGetColumnCount() {
}
@Test
public void testGetColumnLabel() {
}
@Test
public void testGetColumnName() {
}
@Test
public void testGetColumnIndex() {
}
@Test
public void testGetShardingEncryptor() {
}
}
| sharding-core/src/test/java/org/apache/shardingsphere/core/executor/sql/execute/result/QueryResultMetaDataTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.core.executor.sql.execute.result;
import lombok.SneakyThrows;
import org.junit.Before;
import org.junit.Test;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class QueryResultMetaDataTest {
private QueryResultMetaData queryResultMetaData;
@Before
@SneakyThrows
public void setUp() {
getResultMetaData();
}
private ResultSetMetaData getResultMetaData() throws SQLException {
ResultSetMetaData result = mock(ResultSetMetaData.class);
when(result.getColumnCount()).thenReturn(1);
when(result.getColumnName(anyInt())).thenReturn("column");
when(result.getColumnLabel(anyInt())).thenReturn("label");
when(result.getTableName(anyInt())).thenReturn("table");
return result;
}
@Test
public void testGetColumnCount() {
}
@Test
public void testGetColumnLabel() {
}
@Test
public void testGetColumnName() {
}
@Test
public void testGetColumnIndex() {
}
@Test
public void testGetShardingEncryptor() {
}
}
| getShardingRule()
| sharding-core/src/test/java/org/apache/shardingsphere/core/executor/sql/execute/result/QueryResultMetaDataTest.java | getShardingRule() | <ide><path>harding-core/src/test/java/org/apache/shardingsphere/core/executor/sql/execute/result/QueryResultMetaDataTest.java
<ide>
<ide> package org.apache.shardingsphere.core.executor.sql.execute.result;
<ide>
<add>import com.google.common.base.Optional;
<ide> import lombok.SneakyThrows;
<add>import org.apache.shardingsphere.core.encrypt.ShardingEncryptorEngine;
<add>import org.apache.shardingsphere.core.rule.ShardingRule;
<add>import org.apache.shardingsphere.spi.algorithm.encrypt.ShardingEncryptor;
<ide> import org.junit.Before;
<ide> import org.junit.Test;
<ide>
<ide> import java.sql.ResultSetMetaData;
<ide> import java.sql.SQLException;
<add>import java.util.Collections;
<ide>
<ide> import static org.mockito.ArgumentMatchers.anyInt;
<add>import static org.mockito.ArgumentMatchers.anyString;
<ide> import static org.mockito.Mockito.mock;
<ide> import static org.mockito.Mockito.when;
<ide>
<ide> @Before
<ide> @SneakyThrows
<ide> public void setUp() {
<del> getResultMetaData();
<add> ResultSetMetaData resultSetMetaData = getResultMetaData();
<add> ShardingRule shardingRule = getShardingRule();
<add> }
<add>
<add> private ShardingRule getShardingRule() {
<add> ShardingEncryptor shardingEncryptor = mock(ShardingEncryptor.class);
<add> ShardingEncryptorEngine shardingEncryptorEngine = mock(ShardingEncryptorEngine.class);
<add> when(shardingEncryptorEngine.getShardingEncryptor(anyString(), anyString())).thenReturn(Optional.of(shardingEncryptor));
<add> ShardingRule result = mock(ShardingRule.class);
<add> when(result.getShardingEncryptorEngine()).thenReturn(shardingEncryptorEngine);
<add> when(result.getLogicTableNames(anyString())).thenReturn(Collections.<String>emptyList());
<ide> }
<ide>
<ide> private ResultSetMetaData getResultMetaData() throws SQLException { |
|
Java | agpl-3.0 | 1df8f54576d39e730d3e707866722971804e4122 | 0 | JanMarvin/rstudio,JanMarvin/rstudio,JanMarvin/rstudio,JanMarvin/rstudio,JanMarvin/rstudio,JanMarvin/rstudio,JanMarvin/rstudio,JanMarvin/rstudio,JanMarvin/rstudio | /*
* FindOutputPane.java
*
* Copyright (C) 2009-19 by RStudio, Inc.
*
* Unless you have received this program directly from RStudio pursuant
* to the terms of a commercial license agreement with RStudio, then
* this program is licensed to you under the terms of version 3 of the
* GNU Affero General Public License. This program is distributed WITHOUT
* ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT,
* MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the
* AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details.
*
*/
package org.rstudio.studio.client.workbench.views.output.find;
import com.google.gwt.core.client.GWT;
import com.google.gwt.dom.client.NativeEvent;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.dom.client.TableRowElement;
import com.google.gwt.event.dom.client.*;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.safehtml.shared.SafeHtmlBuilder;
import com.google.gwt.user.client.ui.CheckBox;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.ScrollPanel;
import com.google.gwt.user.client.ui.SimplePanel;
import com.google.gwt.user.client.ui.Widget;
import com.google.inject.Inject;
import org.rstudio.core.client.CodeNavigationTarget;
import org.rstudio.core.client.command.AppCommand;
import org.rstudio.core.client.dom.DomUtils;
import org.rstudio.core.client.events.EnsureVisibleEvent;
import org.rstudio.core.client.events.HasSelectionCommitHandlers;
import org.rstudio.core.client.events.SelectionCommitEvent;
import org.rstudio.core.client.events.SelectionCommitHandler;
import org.rstudio.core.client.widget.*;
import org.rstudio.core.client.widget.events.SelectionChangedHandler;
import org.rstudio.studio.client.workbench.commands.Commands;
import org.rstudio.studio.client.workbench.ui.WorkbenchPane;
import org.rstudio.studio.client.workbench.views.output.find.model.FindResult;
import java.util.ArrayList;
public class FindOutputPane extends WorkbenchPane
implements FindOutputPresenter.Display,
HasSelectionCommitHandlers<CodeNavigationTarget>
{
@Inject
public FindOutputPane(Commands commands)
{
super("Find Results");
commands_ = commands;
ensureWidget();
}
@Override
protected Toolbar createMainToolbar()
{
Toolbar toolbar = new Toolbar("Find Output Tab");
searchLabel_ = new Label();
toolbar.addLeftWidget(searchLabel_);
viewReplaceButton_ = new SmallButton("Replace");
viewReplaceButton_.getElement().getStyle().setMarginLeft(9, Unit.PX);
toolbar.addLeftWidget(viewReplaceButton_);
viewReplaceButton_.addClickHandler(new ClickHandler()
{
@Override
public void onClick(ClickEvent event)
{
toggleReplaceToolbar();
}
});
stopSearch_ = new ToolbarButton(
ToolbarButton.NoText,
"Stop find in files",
commands_.interruptR().getImageResource());
stopSearch_.setVisible(false);
toolbar.addRightWidget(stopSearch_);
return toolbar;
}
@Override
protected SecondaryToolbar createSecondaryToolbar()
{
replaceToolbar_ = new SecondaryToolbar("Replace");
replaceMode_ = true;
replaceLabel_ = new Label();
replaceToolbar_.addLeftWidget(replaceLabel_);
replaceTextBox_ = new TextBoxWithCue("Replace in Files");
replaceToolbar_.addLeftWidget(replaceTextBox_);
replaceRegex_ = new CheckBox();
replaceRegexLabel_ =
new CheckboxLabel(replaceRegex_, "Regular expression").getLabel();
replaceRegex_.getElement().getStyle().setMarginLeft(9, Unit.PX);
replaceRegex_.getElement().getStyle().setMarginRight(0, Unit.PX);
replaceToolbar_.addLeftWidget(replaceRegex_);
replaceRegexLabel_.getElement().getStyle().setMarginRight(9, Unit.PX);
replaceToolbar_.addLeftWidget(replaceRegexLabel_);
useGitIgnore_ = new CheckBox();
useGitIgnoreLabel_ =
new CheckboxLabel(useGitIgnore_, "Use .gitignore").getLabel();
useGitIgnore_.getElement().getStyle().setMarginRight(0, Unit.PX);
replaceToolbar_.addLeftWidget(useGitIgnore_);
useGitIgnoreLabel_.getElement().getStyle().setMarginRight(9, Unit.PX);
replaceToolbar_.addLeftWidget(useGitIgnoreLabel_);
replaceAllButton_ = new SmallButton("Replace All");
replaceAllButton_.addClickHandler(new ClickHandler()
{
@Override
public void onClick(ClickEvent event)
{
//addReplaceMatches(replaceTextBox_.getValue());
}
});
replaceToolbar_.addLeftWidget(replaceAllButton_);
replaceTextBox_.addKeyUpHandler(new KeyUpHandler()
{
public void onKeyUp(KeyUpEvent event)
{
// !!! known issue: this does not update correctly when backspacing over the last character
addReplaceMatches(replaceTextBox_.getValue());
if (!replaceMode_)
toggleReplaceMode();
}
});
return replaceToolbar_;
}
@Override
protected Widget createMainWidget()
{
context_ = new FindResultContext();
FindOutputResources resources = GWT.create(FindOutputResources.class);
resources.styles().ensureInjected();
table_ = new FastSelectTable<FindResult, CodeNavigationTarget, Object>(
new FindOutputCodec(resources),
resources.styles().selectedRow(),
true,
false);
FontSizer.applyNormalFontSize(table_);
table_.addStyleName(resources.styles().findOutput());
table_.addClickHandler(new ClickHandler()
{
@Override
public void onClick(ClickEvent event)
{
if (event.getNativeButton() != NativeEvent.BUTTON_LEFT)
return;
if (dblClick_.checkForDoubleClick(event.getNativeEvent()))
fireSelectionCommitted();
}
private final DoubleClickState dblClick_ = new DoubleClickState();
});
table_.addKeyDownHandler(new KeyDownHandler()
{
@Override
public void onKeyDown(KeyDownEvent event)
{
if (event.getNativeKeyCode() == KeyCodes.KEY_ENTER)
fireSelectionCommitted();
event.stopPropagation();
event.preventDefault();
}
});
replaceMode_ = false;
setSecondaryToolbarVisible(replaceMode_);
container_ = new SimplePanel();
container_.addStyleName("ace_editor_theme");
container_.setSize("100%", "100%");
statusPanel_ = new StatusPanel();
statusPanel_.setSize("100%", "100%");
scrollPanel_ = new ScrollPanel(table_);
scrollPanel_.setSize("100%", "100%");
container_.setWidget(scrollPanel_);
return container_;
}
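   // Shows or hides the Replace secondary toolbar. Replace mode is also
   // toggled when the toolbar is being hidden, or when it is shown while the
   // replace text box already holds a value, keeping the match list in sync.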
private void toggleReplaceToolbar()
{
boolean isToolbarVisible = secondaryToolbar_.isVisible();
if (isToolbarVisible ||
!replaceTextBox_.getValue().isEmpty())
toggleReplaceMode();
setSecondaryToolbarVisible(!isToolbarVisible);
}
private void toggleReplaceMode()
{
replaceMode_ = !replaceMode_;
FindOutputResources resources = GWT.create(FindOutputResources.class);
if (replaceMode_)
table_.addStyleName(resources.styles().findOutputReplace());
else
{
table_.removeStyleName(resources.styles().findOutputReplace());
addReplaceMatches(new String());
}
}
private void fireSelectionCommitted()
{
ArrayList<CodeNavigationTarget> values = table_.getSelectedValues();
if (values.size() == 1)
SelectionCommitEvent.fire(this, values.get(0));
}
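   // Appends results up to MAX_COUNT; once the limit is reached, a single
   // overflow row (a null item) is shown instead of further matches.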
@Override
public void addMatches(ArrayList<FindResult> findResults)
{
int matchesToAdd = Math.min(findResults.size(), MAX_COUNT - matchCount_);
if (matchesToAdd > 0)
{
matchCount_ += matchesToAdd;
if (matchCount_ > 0 && container_.getWidget() != scrollPanel_)
container_.setWidget(scrollPanel_);
if (!replaceMode_)
context_.addMatches(findResults.subList(0, matchesToAdd));
table_.addItems(findResults.subList(0, matchesToAdd), false);
}
if (matchesToAdd != findResults.size())
showOverflow();
}
public void addReplaceMatches(String value)
{
table_.clear();
context_.updateFileMatches(value);
addMatches(context_.getFindResults());
}
@Override
public void clearMatches()
{
context_.reset();
table_.clear();
overflow_ = false;
matchCount_ = 0;
statusPanel_.setStatusText("");
container_.setWidget(statusPanel_);
}
@Override
public void showSearchCompleted()
{
if (matchCount_ == 0)
statusPanel_.setStatusText("(No results found)");
}
@Override
public void onSelected()
{
super.onSelected();
table_.focus();
ArrayList<Integer> indices = table_.getSelectedRowIndexes();
if (indices.isEmpty())
table_.selectNextRow();
}
@Override
public void ensureVisible(boolean activate)
{
fireEvent(new EnsureVisibleEvent(activate));
}
@Override
public HasClickHandlers getStopSearchButton()
{
return stopSearch_;
}
@Override
public void setStopSearchButtonVisible(boolean visible)
{
stopSearch_.setVisible(visible);
}
@Override
public void ensureSelectedRowIsVisible()
{
ArrayList<TableRowElement> rows = table_.getSelectedRows();
if (rows.size() > 0)
{
DomUtils.ensureVisibleVert(scrollPanel_.getElement(),
rows.get(0),
20);
}
}
@Override
public HandlerRegistration addSelectionChangedHandler(SelectionChangedHandler handler)
{
return table_.addSelectionChangedHandler(handler);
}
@Override
public void showOverflow()
{
if (overflow_)
return;
overflow_ = true;
ArrayList<FindResult> items = new ArrayList<FindResult>();
items.add(null);
table_.addItems(items, false);
}
@Override
public void updateSearchLabel(String query, String path)
{
SafeHtmlBuilder builder = new SafeHtmlBuilder();
builder.appendEscaped("Results for ")
.appendHtmlConstant("<strong>")
.appendEscaped(query)
.appendHtmlConstant("</strong>")
.appendEscaped(" in ")
.appendEscaped(path);
searchLabel_.getElement().setInnerHTML(builder.toSafeHtml().asString());
}
@Override
public void clearSearchLabel()
{
searchLabel_.setText("");
}
@Override
public HandlerRegistration addSelectionCommitHandler(SelectionCommitHandler<CodeNavigationTarget> handler)
{
return addHandler(handler, SelectionCommitEvent.getType());
}
private class StatusPanel extends HorizontalCenterPanel
{
public StatusPanel()
{
super(new Label(), 50);
label_ = (Label)getWidget();
}
public void setStatusText(String status)
{
label_.setText(status);
}
private final Label label_;
}
private FastSelectTable<FindResult, CodeNavigationTarget, Object> table_;
private FindResultContext context_;
private final Commands commands_;
private Label searchLabel_;
private ToolbarButton stopSearch_;
private SimplePanel container_;
private ScrollPanel scrollPanel_;
private StatusPanel statusPanel_;
private boolean overflow_ = false;
private int matchCount_;
private SmallButton viewReplaceButton_;
private SecondaryToolbar replaceToolbar_;
private boolean replaceMode_;
private Label replaceLabel_;
private CheckBox replaceRegex_;
private Label replaceRegexLabel_;
private CheckBox useGitIgnore_;
private Label useGitIgnoreLabel_;
private TextBoxWithCue replaceTextBox_;
private SmallButton replaceAllButton_;
// This must be the same as MAX_COUNT in SessionFind.cpp
private static final int MAX_COUNT = 1000;
}
| src/gwt/src/org/rstudio/studio/client/workbench/views/output/find/FindOutputPane.java | /*
* FindOutputPane.java
*
* Copyright (C) 2009-19 by RStudio, Inc.
*
* Unless you have received this program directly from RStudio pursuant
* to the terms of a commercial license agreement with RStudio, then
* this program is licensed to you under the terms of version 3 of the
* GNU Affero General Public License. This program is distributed WITHOUT
* ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT,
* MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the
* AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details.
*
*/
package org.rstudio.studio.client.workbench.views.output.find;
import com.google.gwt.core.client.GWT;
import com.google.gwt.dom.client.NativeEvent;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.dom.client.TableRowElement;
import com.google.gwt.event.dom.client.*;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.safehtml.shared.SafeHtmlBuilder;
import com.google.gwt.user.client.ui.CheckBox;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.ScrollPanel;
import com.google.gwt.user.client.ui.SimplePanel;
import com.google.gwt.user.client.ui.Widget;
import com.google.inject.Inject;
import org.rstudio.core.client.CodeNavigationTarget;
import org.rstudio.core.client.command.AppCommand;
import org.rstudio.core.client.dom.DomUtils;
import org.rstudio.core.client.events.EnsureVisibleEvent;
import org.rstudio.core.client.events.HasSelectionCommitHandlers;
import org.rstudio.core.client.events.SelectionCommitEvent;
import org.rstudio.core.client.events.SelectionCommitHandler;
import org.rstudio.core.client.widget.*;
import org.rstudio.core.client.widget.events.SelectionChangedHandler;
import org.rstudio.studio.client.workbench.commands.Commands;
import org.rstudio.studio.client.workbench.ui.WorkbenchPane;
import org.rstudio.studio.client.workbench.views.output.find.model.FindResult;
import java.util.ArrayList;
public class FindOutputPane extends WorkbenchPane
implements FindOutputPresenter.Display,
HasSelectionCommitHandlers<CodeNavigationTarget>
{
@Inject
public FindOutputPane(Commands commands)
{
super("Find Results");
commands_ = commands;
ensureWidget();
}
@Override
protected Toolbar createMainToolbar()
{
Toolbar toolbar = new Toolbar("Find Output Tab");
searchLabel_ = new Label();
toolbar.addLeftWidget(searchLabel_);
viewReplaceButton_ = new SmallButton("Replace");
toolbar.addLeftWidget(viewReplaceButton_);
viewReplaceButton_.addClickHandler(new ClickHandler()
{
@Override
public void onClick(ClickEvent event)
{
toggleReplaceView();
}
});
stopSearch_ = new ToolbarButton(
ToolbarButton.NoText,
"Stop find in files",
commands_.interruptR().getImageResource());
stopSearch_.setVisible(false);
toolbar.addRightWidget(stopSearch_);
return toolbar;
}
@Override
protected SecondaryToolbar createSecondaryToolbar()
{
replaceToolbar_ = new SecondaryToolbar("Replace");
replaceMode_ = true;
replaceLabel_ = new Label();
replaceToolbar_.addLeftWidget(replaceLabel_);
replaceTextBox_ = new TextBoxWithCue("Replace in Files");
replaceToolbar_.addLeftWidget(replaceTextBox_);
replaceRegex_ = new CheckBox();
replaceRegexLabel_ =
new CheckboxLabel(replaceRegex_, "Regular expression").getLabel();
replaceRegex_.getElement().getStyle().setMarginRight(0, Unit.PX);
replaceToolbar_.addLeftWidget(replaceRegex_);
replaceRegexLabel_.getElement().getStyle().setMarginRight(9, Unit.PX);
replaceToolbar_.addLeftWidget(replaceRegexLabel_);
replaceAllButton_ = new SmallButton("Replace All");
replaceAllButton_.addClickHandler(new ClickHandler()
{
@Override
public void onClick(ClickEvent event)
{
addReplaceMatches(replaceTextBox_.getValue());
}
});
replaceToolbar_.addLeftWidget(replaceAllButton_);
/*
replaceTextBox_.addKeyDownHandler(new KeyDownHandler()
{
public void onKeyDown(KeyDownEvent event);
{
}
});
*/
return replaceToolbar_;
}
@Override
protected Widget createMainWidget()
{
context_ = new FindResultContext();
FindOutputResources resources = GWT.create(FindOutputResources.class);
resources.styles().ensureInjected();
table_ = new FastSelectTable<FindResult, CodeNavigationTarget, Object>(
new FindOutputCodec(resources),
resources.styles().selectedRow(),
true,
false);
FontSizer.applyNormalFontSize(table_);
table_.addStyleName(resources.styles().findOutput());
table_.addClickHandler(new ClickHandler()
{
@Override
public void onClick(ClickEvent event)
{
if (event.getNativeButton() != NativeEvent.BUTTON_LEFT)
return;
if (dblClick_.checkForDoubleClick(event.getNativeEvent()))
fireSelectionCommitted();
}
private final DoubleClickState dblClick_ = new DoubleClickState();
});
table_.addKeyDownHandler(new KeyDownHandler()
{
@Override
public void onKeyDown(KeyDownEvent event)
{
if (event.getNativeKeyCode() == KeyCodes.KEY_ENTER)
fireSelectionCommitted();
event.stopPropagation();
event.preventDefault();
}
});
replaceMode_ = false;
setSecondaryToolbarVisible(replaceMode_);
container_ = new SimplePanel();
container_.addStyleName("ace_editor_theme");
container_.setSize("100%", "100%");
statusPanel_ = new StatusPanel();
statusPanel_.setSize("100%", "100%");
scrollPanel_ = new ScrollPanel(table_);
scrollPanel_.setSize("100%", "100%");
container_.setWidget(scrollPanel_);
return container_;
}
private void toggleReplaceView()
{
replaceMode_ = !replaceMode_;
setSecondaryToolbarVisible(replaceMode_);
FindOutputResources resources = GWT.create(FindOutputResources.class);
if (replaceMode_)
{
table_.addStyleName(resources.styles().findOutputReplace());
}
else
{
table_.removeStyleName(resources.styles().findOutputReplace());
}
}
private void fireSelectionCommitted()
{
ArrayList<CodeNavigationTarget> values = table_.getSelectedValues();
if (values.size() == 1)
SelectionCommitEvent.fire(this, values.get(0));
}
@Override
public void addMatches(ArrayList<FindResult> findResults)
{
int matchesToAdd = Math.min(findResults.size(), MAX_COUNT - matchCount_);
if (matchesToAdd > 0)
{
matchCount_ += matchesToAdd;
if (matchCount_ > 0 && container_.getWidget() != scrollPanel_)
container_.setWidget(scrollPanel_);
if (!replaceMode_)
context_.addMatches(findResults.subList(0, matchesToAdd));
table_.addItems(findResults.subList(0, matchesToAdd), false);
}
if (matchesToAdd != findResults.size())
showOverflow();
}
public void addReplaceMatches(String value)
{
table_.clear();
context_.updateFileMatches(value);
addMatches(context_.getFindResults());
}
@Override
public void clearMatches()
{
context_.reset();
table_.clear();
overflow_ = false;
matchCount_ = 0;
statusPanel_.setStatusText("");
container_.setWidget(statusPanel_);
}
@Override
public void showSearchCompleted()
{
if (matchCount_ == 0)
statusPanel_.setStatusText("(No results found)");
}
@Override
public void onSelected()
{
super.onSelected();
table_.focus();
ArrayList<Integer> indices = table_.getSelectedRowIndexes();
if (indices.isEmpty())
table_.selectNextRow();
}
@Override
public void ensureVisible(boolean activate)
{
fireEvent(new EnsureVisibleEvent(activate));
}
@Override
public HasClickHandlers getStopSearchButton()
{
return stopSearch_;
}
@Override
public void setStopSearchButtonVisible(boolean visible)
{
stopSearch_.setVisible(visible);
}
@Override
public void ensureSelectedRowIsVisible()
{
ArrayList<TableRowElement> rows = table_.getSelectedRows();
if (rows.size() > 0)
{
DomUtils.ensureVisibleVert(scrollPanel_.getElement(),
rows.get(0),
20);
}
}
@Override
public HandlerRegistration addSelectionChangedHandler(SelectionChangedHandler handler)
{
return table_.addSelectionChangedHandler(handler);
}
@Override
public void showOverflow()
{
if (overflow_)
return;
overflow_ = true;
ArrayList<FindResult> items = new ArrayList<FindResult>();
items.add(null);
table_.addItems(items, false);
}
@Override
public void updateSearchLabel(String query, String path)
{
SafeHtmlBuilder builder = new SafeHtmlBuilder();
builder.appendEscaped("Results for ")
.appendHtmlConstant("<strong>")
.appendEscaped(query)
.appendHtmlConstant("</strong>")
.appendEscaped(" in ")
.appendEscaped(path);
searchLabel_.getElement().setInnerHTML(builder.toSafeHtml().asString());
}
@Override
public void clearSearchLabel()
{
searchLabel_.setText("");
}
@Override
public HandlerRegistration addSelectionCommitHandler(SelectionCommitHandler<CodeNavigationTarget> handler)
{
return addHandler(handler, SelectionCommitEvent.getType());
}
private class StatusPanel extends HorizontalCenterPanel
{
public StatusPanel()
{
super(new Label(), 50);
label_ = (Label)getWidget();
}
public void setStatusText(String status)
{
label_.setText(status);
}
private final Label label_;
}
private FastSelectTable<FindResult, CodeNavigationTarget, Object> table_;
private FindResultContext context_;
private final Commands commands_;
private Label searchLabel_;
private ToolbarButton stopSearch_;
private SimplePanel container_;
private ScrollPanel scrollPanel_;
private StatusPanel statusPanel_;
private boolean overflow_ = false;
private int matchCount_;
private SmallButton viewReplaceButton_;
private SecondaryToolbar replaceToolbar_;
private boolean replaceMode_;
private Label replaceLabel_;
private Label replaceRegexLabel_;
private CheckBox replaceRegex_;
private TextBoxWithCue replaceTextBox_;
private SmallButton replaceAllButton_;
// This must be the same as MAX_COUNT in SessionFind.cpp
private static final int MAX_COUNT = 1000;
}
| fix up find in files pane
| src/gwt/src/org/rstudio/studio/client/workbench/views/output/find/FindOutputPane.java | fix up find in files pane | <ide><path>rc/gwt/src/org/rstudio/studio/client/workbench/views/output/find/FindOutputPane.java
<ide> toolbar.addLeftWidget(searchLabel_);
<ide>
<ide> viewReplaceButton_ = new SmallButton("Replace");
<add> viewReplaceButton_.getElement().getStyle().setMarginLeft(9, Unit.PX);
<ide> toolbar.addLeftWidget(viewReplaceButton_);
<ide> viewReplaceButton_.addClickHandler(new ClickHandler()
<ide> {
<ide> @Override
<ide> public void onClick(ClickEvent event)
<ide> {
<del> toggleReplaceView();
<add> toggleReplaceToolbar();
<ide> }
<ide> });
<ide>
<ide> replaceRegex_ = new CheckBox();
<ide> replaceRegexLabel_ =
<ide> new CheckboxLabel(replaceRegex_, "Regular expression").getLabel();
<add> replaceRegex_.getElement().getStyle().setMarginLeft(9, Unit.PX);
<ide> replaceRegex_.getElement().getStyle().setMarginRight(0, Unit.PX);
<ide> replaceToolbar_.addLeftWidget(replaceRegex_);
<ide> replaceRegexLabel_.getElement().getStyle().setMarginRight(9, Unit.PX);
<ide> replaceToolbar_.addLeftWidget(replaceRegexLabel_);
<ide>
<add> useGitIgnore_ = new CheckBox();
<add> useGitIgnoreLabel_ =
<add> new CheckboxLabel(useGitIgnore_, "Use .gitignore").getLabel();
<add> useGitIgnore_.getElement().getStyle().setMarginRight(0, Unit.PX);
<add> replaceToolbar_.addLeftWidget(useGitIgnore_);
<add> useGitIgnoreLabel_.getElement().getStyle().setMarginRight(9, Unit.PX);
<add> replaceToolbar_.addLeftWidget(useGitIgnoreLabel_);
<add>
<ide> replaceAllButton_ = new SmallButton("Replace All");
<ide> replaceAllButton_.addClickHandler(new ClickHandler()
<ide> {
<ide> @Override
<ide> public void onClick(ClickEvent event)
<ide> {
<del> addReplaceMatches(replaceTextBox_.getValue());
<add> //addReplaceMatches(replaceTextBox_.getValue());
<ide> }
<ide> });
<ide> replaceToolbar_.addLeftWidget(replaceAllButton_);
<ide>
<del> /*
<del> replaceTextBox_.addKeyDownHandler(new KeyDownHandler()
<del> {
<del> public void onKeyDown(KeyDownEvent event);
<add> replaceTextBox_.addKeyUpHandler(new KeyUpHandler()
<add> {
<add> public void onKeyUp(KeyUpEvent event)
<ide> {
<add> // !!! currently not acting properly when backspacing the last character
<add> addReplaceMatches(replaceTextBox_.getValue());
<add> if (!replaceMode_)
<add> toggleReplaceMode();
<ide> }
<ide> });
<del> */
<ide>
<ide> return replaceToolbar_;
<ide> }
<ide> return container_;
<ide> }
<ide>
<del> private void toggleReplaceView()
<add> private void toggleReplaceToolbar()
<add> {
<add> boolean isToolbarVisible = secondaryToolbar_.isVisible();
<add> if (isToolbarVisible ||
<add> !replaceTextBox_.getValue().isEmpty())
<add> toggleReplaceMode();
<add> setSecondaryToolbarVisible(!isToolbarVisible);
<add> }
<add>
<add> private void toggleReplaceMode()
<ide> {
<ide> replaceMode_ = !replaceMode_;
<del> setSecondaryToolbarVisible(replaceMode_);
<del>
<ide> FindOutputResources resources = GWT.create(FindOutputResources.class);
<ide> if (replaceMode_)
<del> {
<ide> table_.addStyleName(resources.styles().findOutputReplace());
<del> }
<ide> else
<ide> {
<ide> table_.removeStyleName(resources.styles().findOutputReplace());
<add> addReplaceMatches(new String());
<ide> }
<ide> }
<ide>
<ide> private SecondaryToolbar replaceToolbar_;
<ide> private boolean replaceMode_;
<ide> private Label replaceLabel_;
<add> private CheckBox replaceRegex_;
<ide> private Label replaceRegexLabel_;
<del> private CheckBox replaceRegex_;
<add> private CheckBox useGitIgnore_;
<add> private Label useGitIgnoreLabel_;
<ide> private TextBoxWithCue replaceTextBox_;
<ide> private SmallButton replaceAllButton_;
<ide> |
|
Java | mit | 123ca970302319cb1561e18577d30124a06ecd11 | 0 | michel-kraemer/actson | // MIT License
//
// Copyright (c) 2016 Michel Kraemer
// Copyright (c) 2005 JSON.org
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
package de.undercouch.actson;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
/**
* <p>A non-blocking, event-based JSON parser.</p>
* <p>The parser gets input data from a feeder that can be accessed through
* {@link #getFeeder()}. See {@link JsonFeeder} for more details.</p>
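 * <p>A minimal read loop might look like the sketch below. It feeds a byte
 * array to the parser's feeder and handles events until the input has been
 * consumed. The exact {@code feed()} variant used here is assumed from
 * {@link JsonFeeder}; refer to that interface for the actual signatures.</p>
 * <pre>{@code
 * JsonParser parser = new JsonParser(StandardCharsets.UTF_8);
 * byte[] json = "{\"name\":\"value\"}".getBytes(StandardCharsets.UTF_8);
 * int pos = 0;
 * int event;
 * do {
 *   // feed more input until the parser produces an event
 *   while ((event = parser.nextEvent()) == JsonEvent.NEED_MORE_INPUT) {
 *     pos += parser.getFeeder().feed(json, pos, json.length - pos);
 *     if (pos == json.length) {
 *       parser.getFeeder().done(); // signal end of input
 *     }
 *   }
 *   // ... handle the event here ...
 * } while (event != JsonEvent.EOF && event != JsonEvent.ERROR);
 * }</pre>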
* @author Michel Kraemer
* @author JSON.org
* @since 1.0.0
*/
public class JsonParser {
private static final int __ = -1; // the universal error code
/**
* Characters are mapped into these 31 character classes. This allows for
* a significant reduction in the size of the state transition table.
*/
private static final int C_SPACE = 0; // space
private static final int C_WHITE = 1; // other whitespace
private static final int C_LCURB = 2; // {
private static final int C_RCURB = 3; // }
private static final int C_LSQRB = 4; // [
private static final int C_RSQRB = 5; // ]
private static final int C_COLON = 6; // :
private static final int C_COMMA = 7; // ,
private static final int C_QUOTE = 8; // "
private static final int C_BACKS = 9; // \
private static final int C_SLASH = 10; // /
private static final int C_PLUS = 11; // +
private static final int C_MINUS = 12; // -
private static final int C_POINT = 13; // .
private static final int C_ZERO = 14; // 0
private static final int C_DIGIT = 15; // 123456789
private static final int C_LOW_A = 16; // a
private static final int C_LOW_B = 17; // b
private static final int C_LOW_C = 18; // c
private static final int C_LOW_D = 19; // d
private static final int C_LOW_E = 20; // e
private static final int C_LOW_F = 21; // f
private static final int C_LOW_L = 22; // l
private static final int C_LOW_N = 23; // n
private static final int C_LOW_R = 24; // r
private static final int C_LOW_S = 25; // s
private static final int C_LOW_T = 26; // t
private static final int C_LOW_U = 27; // u
private static final int C_ABCDF = 28; // ABCDF
private static final int C_E = 29; // E
private static final int C_ETC = 30; // everything else
/**
* This array maps the 128 ASCII characters into character classes.
* The remaining Unicode characters should be mapped to C_ETC.
* Non-whitespace control characters are errors.
*/
private final static int[] ascii_class = {
__, __, __, __, __, __, __, __,
__, C_WHITE, C_WHITE, __, __, C_WHITE, __, __,
__, __, __, __, __, __, __, __,
__, __, __, __, __, __, __, __,
C_SPACE, C_ETC, C_QUOTE, C_ETC, C_ETC, C_ETC, C_ETC, C_ETC,
C_ETC, C_ETC, C_ETC, C_PLUS, C_COMMA, C_MINUS, C_POINT, C_SLASH,
C_ZERO, C_DIGIT, C_DIGIT, C_DIGIT, C_DIGIT, C_DIGIT, C_DIGIT, C_DIGIT,
C_DIGIT, C_DIGIT, C_COLON, C_ETC, C_ETC, C_ETC, C_ETC, C_ETC,
C_ETC, C_ABCDF, C_ABCDF, C_ABCDF, C_ABCDF, C_E, C_ABCDF, C_ETC,
C_ETC, C_ETC, C_ETC, C_ETC, C_ETC, C_ETC, C_ETC, C_ETC,
C_ETC, C_ETC, C_ETC, C_ETC, C_ETC, C_ETC, C_ETC, C_ETC,
C_ETC, C_ETC, C_ETC, C_LSQRB, C_BACKS, C_RSQRB, C_ETC, C_ETC,
C_ETC, C_LOW_A, C_LOW_B, C_LOW_C, C_LOW_D, C_LOW_E, C_LOW_F, C_ETC,
C_ETC, C_ETC, C_ETC, C_ETC, C_LOW_L, C_ETC, C_LOW_N, C_ETC,
C_ETC, C_ETC, C_LOW_R, C_LOW_S, C_LOW_T, C_LOW_U, C_ETC, C_ETC,
C_ETC, C_ETC, C_ETC, C_LCURB, C_ETC, C_RCURB, C_ETC, C_ETC
};
/**
* The state codes.
*/
private static final int GO = 0; // start
private static final int OK = 1; // ok
private static final int OB = 2; // object
private static final int KE = 3; // key
private static final int CO = 4; // colon
private static final int VA = 5; // value
private static final int AR = 6; // array
private static final int ST = 7; // string
private static final int ES = 8; // escape
private static final int U1 = 9; // u1
private static final int U2 = 10; // u2
private static final int U3 = 11; // u3
private static final int U4 = 12; // u4
private static final int MI = 13; // minus
private static final int ZE = 14; // zero
private static final int IN = 15; // integer
private static final int FR = 16; // fraction
private static final int E1 = 17; // e
private static final int E2 = 18; // ex
private static final int E3 = 19; // exp
private static final int T1 = 20; // tr
private static final int T2 = 21; // tru
private static final int T3 = 22; // true
private static final int F1 = 23; // fa
private static final int F2 = 24; // fal
private static final int F3 = 25; // fals
private static final int F4 = 26; // false
private static final int N1 = 27; // nu
private static final int N2 = 28; // nul
private static final int N3 = 29; // null
/**
* The state transition table takes the current state and the current symbol,
* and returns either a new state or an action. An action is represented as a
* negative number. A JSON text is accepted if at the end of the text the
* state is OK and if the mode is MODE_DONE.
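   * For example, in the start state GO the character <code>{</code> (class
   * C_LCURB) maps to the action -6, which pushes MODE_KEY, moves to state OB
   * and emits {@link JsonEvent#START_OBJECT}.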
*/
private static int[][] state_transition_table = {
/* white 1-9 ABCDF etc
space | { } [ ] : , " \ / + - . 0 | a b c d e f l n r s t u | E |*/
/*start GO*/ {GO,GO,-6,__,-5,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__},
/*ok OK*/ {OK,OK,__,-8,__,-7,__,-3,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__},
/*object OB*/ {OB,OB,__,-9,__,__,__,__,ST,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__},
/*key KE*/ {KE,KE,__,__,__,__,__,__,ST,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__},
/*colon CO*/ {CO,CO,__,__,__,__,-2,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__},
/*value VA*/ {VA,VA,-6,__,-5,__,__,__,ST,__,__,__,MI,__,ZE,IN,__,__,__,__,__,F1,__,N1,__,__,T1,__,__,__,__},
/*array AR*/ {AR,AR,-6,__,-5,-7,__,__,ST,__,__,__,MI,__,ZE,IN,__,__,__,__,__,F1,__,N1,__,__,T1,__,__,__,__},
/*string ST*/ {ST,__,ST,ST,ST,ST,ST,ST,-4,ES,ST,ST,ST,ST,ST,ST,ST,ST,ST,ST,ST,ST,ST,ST,ST,ST,ST,ST,ST,ST,ST},
/*escape ES*/ {__,__,__,__,__,__,__,__,ST,ST,ST,__,__,__,__,__,__,ST,__,__,__,ST,__,ST,ST,__,ST,U1,__,__,__},
/*u1 U1*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,U2,U2,U2,U2,U2,U2,U2,U2,__,__,__,__,__,__,U2,U2,__},
/*u2 U2*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,U3,U3,U3,U3,U3,U3,U3,U3,__,__,__,__,__,__,U3,U3,__},
/*u3 U3*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,U4,U4,U4,U4,U4,U4,U4,U4,__,__,__,__,__,__,U4,U4,__},
/*u4 U4*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,ST,ST,ST,ST,ST,ST,ST,ST,__,__,__,__,__,__,ST,ST,__},
/*minus MI*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,ZE,IN,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__},
/*zero ZE*/ {OK,OK,__,-8,__,-7,__,-3,__,__,__,__,__,FR,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__},
/*int IN*/ {OK,OK,__,-8,__,-7,__,-3,__,__,__,__,__,FR,IN,IN,__,__,__,__,E1,__,__,__,__,__,__,__,__,E1,__},
/*frac FR*/ {OK,OK,__,-8,__,-7,__,-3,__,__,__,__,__,__,FR,FR,__,__,__,__,E1,__,__,__,__,__,__,__,__,E1,__},
/*e E1*/ {__,__,__,__,__,__,__,__,__,__,__,E2,E2,__,E3,E3,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__},
/*ex E2*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,E3,E3,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__},
/*exp E3*/ {OK,OK,__,-8,__,-7,__,-3,__,__,__,__,__,__,E3,E3,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__},
/*tr T1*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,T2,__,__,__,__,__,__},
/*tru T2*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,T3,__,__,__},
/*true T3*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,OK,__,__,__,__,__,__,__,__,__,__},
/*fa F1*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,F2,__,__,__,__,__,__,__,__,__,__,__,__,__,__},
/*fal F2*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,F3,__,__,__,__,__,__,__,__},
/*fals F3*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,F4,__,__,__,__,__},
/*false F4*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,OK,__,__,__,__,__,__,__,__,__,__},
/*nu N1*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,N2,__,__,__},
/*nul N2*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,N3,__,__,__,__,__,__,__,__},
/*null N3*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,OK,__,__,__,__,__,__,__,__},
};
/**
* These modes can be pushed on the stack.
*/
private static final int MODE_ARRAY = 0;
private static final int MODE_DONE = 1;
private static final int MODE_KEY = 2;
private static final int MODE_OBJECT = 3;
/**
* The stack containing the current modes
*/
private int[] stack;
/**
* The top of the stack (-1 if the stack is empty)
*/
private int top = -1;
/**
* The maximum number of modes on the stack
*/
private int depth = 2048;
/**
* The current state
*/
private int state;
/**
* Collects all characters if the current state is ST (String),
* IN (Integer), FR (Fraction) or the like
*/
private StringBuilder currentValue = new StringBuilder(128);
/**
* The number of characters processed by the JSON parser
* @since 1.1.0
*/
private int parsedCharacterCount = 0;
/**
* The feeder is used to get input to parse
*/
private final JsonFeeder feeder;
/**
* The first event returned by {@link #parse(char)}
*/
private int event1 = JsonEvent.NEED_MORE_INPUT;
/**
* The second event returned by {@link #parse(char)}
*/
private int event2 = JsonEvent.NEED_MORE_INPUT;
/**
* Push a mode onto the stack
* @param mode the mode to push
* @return false if there is overflow
*/
private boolean push(int mode) {
++top;
if (top >= stack.length) {
if (top >= depth) {
return false;
}
stack = Arrays.copyOf(stack, Math.min(stack.length * 2, depth));
}
stack[top] = mode;
return true;
}
/**
* Pop the stack, assuring that the current mode matches the expectation
* @param mode the expected mode
* @return false if there is underflow or if the modes mismatch
*/
private boolean pop(int mode) {
if (top < 0 || stack[top] != mode) {
return false;
}
--top;
return true;
}
/**
* Constructs a JSON parser that uses the UTF-8 charset to decode input data
*/
public JsonParser() {
this(StandardCharsets.UTF_8);
}
/**
* Constructs a JSON parser
* @param charset the charset that should be used to decode the
* parser's input data
*/
public JsonParser(Charset charset) {
this(new DefaultJsonFeeder(charset));
}
/**
* Constructs the JSON parser
* @param feeder the feeder that will provide the parser with input data
*/
public JsonParser(JsonFeeder feeder) {
stack = new int[16];
top = -1;
state = GO;
push(MODE_DONE);
this.feeder = feeder;
}
/**
* Set the maximum number of modes on the stack (basically the maximum number
* of nested objects/arrays in the JSON text to parse)
* @param depth the maximum number of modes
*/
public void setMaxDepth(int depth) {
this.depth = depth;
}
/**
* @return the maximum number of modes on the stack (basically the maximum
* number of nested objects/arrays in the JSON text to parse)
*/
public int getMaxDepth() {
return depth;
}
/**
* Call this method to proceed parsing the JSON text and to get the next
* event. The method returns {@link JsonEvent#NEED_MORE_INPUT} if it needs
* more input data from the parser's feeder.
* @return the next JSON event or {@link JsonEvent#NEED_MORE_INPUT} if more
* input is needed
*/
public int nextEvent() {
try {
while (event1 == JsonEvent.NEED_MORE_INPUT) {
if (!feeder.hasInput()) {
if (feeder.isDone()) {
return (state == OK && pop(MODE_DONE) ? JsonEvent.EOF : JsonEvent.ERROR);
}
return JsonEvent.NEED_MORE_INPUT;
}
parse(feeder.nextInput());
}
} catch (CharacterCodingException e) {
return JsonEvent.ERROR;
}
int r = event1;
if (event1 != JsonEvent.ERROR) {
event1 = event2;
event2 = JsonEvent.NEED_MORE_INPUT;
}
return r;
}
/**
* Get the feeder that can be used to provide more input to the parser
* @return the parser's feeder
*/
public JsonFeeder getFeeder() {
return feeder;
}
/**
* This function is called for each character (or partial character) in the
* JSON text. It can accept UTF-8, UTF-16, or UTF-32. It will set
* {@link #event1} and {@link #event2} accordingly. As a precondition these
* fields should have a value of {@link JsonEvent#NEED_MORE_INPUT}.
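   * For example, the closing bracket in the text {@code [1]} produces two
   * events from a single character ({@link JsonEvent#VALUE_INT} followed by
   * {@link JsonEvent#END_ARRAY}), which is why two event fields are needed.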
* @param nextChar the character to parse
*/
private void parse(char nextChar) {
parsedCharacterCount++;
// Determine the character's class.
int nextClass;
if (nextChar >= 128) {
nextClass = C_ETC;
} else {
nextClass = ascii_class[nextChar];
if (nextClass <= __) {
event1 = JsonEvent.ERROR;
return;
}
}
// Get the next state from the state transition table.
int nextState = state_transition_table[state][nextClass];
if (nextState >= 0) {
if (nextState >= ST && nextState <= E3) {
if (state < ST || state > E3) {
currentValue.setLength(0);
if (nextState != ST) {
currentValue.append(nextChar);
}
} else {
currentValue.append(nextChar);
}
} else if (nextState == OK) {
// end of token identified, convert state to result
event1 = stateToEvent();
}
// Change the state.
state = nextState;
} else {
// Or perform one of the actions.
switch (nextState) {
// empty }
case -9:
if (!pop(MODE_KEY)) {
event1 = JsonEvent.ERROR;
return;
}
state = OK;
event1 = JsonEvent.END_OBJECT;
break;
// }
case -8:
if (!pop(MODE_OBJECT)) {
event1 = JsonEvent.ERROR;
return;
}
state = OK;
event1 = JsonEvent.END_OBJECT;
break;
// ]
case -7:
if (!pop(MODE_ARRAY)) {
event1 = JsonEvent.ERROR;
return;
}
event1 = stateToEvent();
if (event1 == JsonEvent.NEED_MORE_INPUT) {
event1 = JsonEvent.END_ARRAY;
} else {
event2 = JsonEvent.END_ARRAY;
}
state = OK;
break;
// {
case -6:
if (!push(MODE_KEY)) {
event1 = JsonEvent.ERROR;
return;
}
state = OB;
event1 = JsonEvent.START_OBJECT;
break;
// [
case -5:
if (!push(MODE_ARRAY)) {
event1 = JsonEvent.ERROR;
return;
}
state = AR;
event1 = JsonEvent.START_ARRAY;
break;
// "
case -4:
switch (stack[top]) {
case MODE_KEY:
state = CO;
event1 = JsonEvent.FIELD_NAME;
break;
case MODE_ARRAY:
case MODE_OBJECT:
state = OK;
event1 = JsonEvent.VALUE_STRING;
break;
default:
event1 = JsonEvent.ERROR;
return;
}
break;
// ,
case -3:
switch (stack[top]) {
case MODE_OBJECT:
// A comma causes a flip from object mode to key mode.
if (!pop(MODE_OBJECT) || !push(MODE_KEY)) {
event1 = JsonEvent.ERROR;
return;
}
event1 = stateToEvent();
state = KE;
break;
case MODE_ARRAY:
event1 = stateToEvent();
state = VA;
break;
default:
event1 = JsonEvent.ERROR;
return;
}
break;
// :
case -2:
// A colon causes a flip from key mode to object mode.
if (!pop(MODE_KEY) || !push(MODE_OBJECT)) {
event1 = JsonEvent.ERROR;
return;
}
state = VA;
break;
// Bad action.
default:
event1 = JsonEvent.ERROR;
return;
}
}
}
/**
* Converts the current parser state to a JSON event
* @return the JSON event or {@link JsonEvent#NEED_MORE_INPUT} if the
* current state does not produce a JSON event
*/
private int stateToEvent() {
if (state == IN || state == ZE) {
return JsonEvent.VALUE_INT;
} else if (state == FR || state == E1 || state == E2 || state == E3) {
return JsonEvent.VALUE_DOUBLE;
} else if (state == T3) {
return JsonEvent.VALUE_TRUE;
} else if (state == F4) {
return JsonEvent.VALUE_FALSE;
} else if (state == N3) {
return JsonEvent.VALUE_NULL;
}
return JsonEvent.NEED_MORE_INPUT;
}
/**
* If the event returned by {@link #nextEvent()} was
* {@link JsonEvent#VALUE_STRING} this method will return the parsed string
* @return the parsed string
*/
public String getCurrentString() {
return currentValue.toString();
}
/**
* If the event returned by {@link #nextEvent()} was
* {@link JsonEvent#VALUE_INT} this method will return the parsed integer
* @return the parsed integer
*/
public int getCurrentInt() {
return Integer.parseInt(currentValue.toString());
}
/**
* If the event returned by {@link #nextEvent()} was
* {@link JsonEvent#VALUE_DOUBLE} this method will return the parsed double
* @return the parsed double
*/
public double getCurrentDouble() {
return Double.parseDouble(currentValue.toString());
}
/**
* <p>Get the number of characters processed by the JSON parser so far.</p>
* <p>Use this method to get the location of an event returned by
* {@link #nextEvent()}. Note that the character count is always greater than
* the actual position of the event in the parsed JSON text. For example, if
* {@link #nextEvent()} returns {@link JsonEvent#START_OBJECT} and the
* character count is <code>n</code>, the location of the <code>{</code>
* character is <code>n-1</code>. If {@link #nextEvent()} returns
* {@link JsonEvent#FIELD_NAME} and the parsed field name is
* <code>"id"</code>, the location is <code>n-4</code> because the field
* name is 4 characters long (including the quotes) and the parser has
* already processed all characters of it.</p>
* @return the character offset
* @since 1.1.0
*/
public int getParsedCharacterCount() {
return parsedCharacterCount;
}
}
| src/main/java/de/undercouch/actson/JsonParser.java | // MIT License
//
// Copyright (c) 2016 Michel Kraemer
// Copyright (c) 2005 JSON.org
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
package de.undercouch.actson;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
/**
* <p>A non-blocking, event-based JSON parser.</p>
* <p>The parser gets input data from a feeder that can be accessed through
* {@link #getFeeder()}. See {@link JsonFeeder} for more details.</p>
* @author Michel Kraemer
* @author JSON.org
* @since 1.0.0
*/
public class JsonParser {
private static final int __ = -1; // the universal error code
/**
* Characters are mapped into these 31 character classes. This allows for
* a significant reduction in the size of the state transition table.
*/
private static final int C_SPACE = 0; // space
private static final int C_WHITE = 1; // other whitespace
private static final int C_LCURB = 2; // {
private static final int C_RCURB = 3; // }
private static final int C_LSQRB = 4; // [
private static final int C_RSQRB = 5; // ]
private static final int C_COLON = 6; // :
private static final int C_COMMA = 7; // ,
private static final int C_QUOTE = 8; // "
private static final int C_BACKS = 9; // \
private static final int C_SLASH = 10; // /
private static final int C_PLUS = 11; // +
private static final int C_MINUS = 12; // -
private static final int C_POINT = 13; // .
private static final int C_ZERO = 14; // 0
private static final int C_DIGIT = 15; // 123456789
private static final int C_LOW_A = 16; // a
private static final int C_LOW_B = 17; // b
private static final int C_LOW_C = 18; // c
private static final int C_LOW_D = 19; // d
private static final int C_LOW_E = 20; // e
private static final int C_LOW_F = 21; // f
private static final int C_LOW_L = 22; // l
private static final int C_LOW_N = 23; // n
private static final int C_LOW_R = 24; // r
private static final int C_LOW_S = 25; // s
private static final int C_LOW_T = 26; // t
private static final int C_LOW_U = 27; // u
private static final int C_ABCDF = 28; // ABCDF
private static final int C_E = 29; // E
private static final int C_ETC = 30; // everything else
/**
* This array maps the 128 ASCII characters into character classes.
* The remaining Unicode characters should be mapped to C_ETC.
* Non-whitespace control characters are errors.
*/
private final static int[] ascii_class = {
__, __, __, __, __, __, __, __,
__, C_WHITE, C_WHITE, __, __, C_WHITE, __, __,
__, __, __, __, __, __, __, __,
__, __, __, __, __, __, __, __,
C_SPACE, C_ETC, C_QUOTE, C_ETC, C_ETC, C_ETC, C_ETC, C_ETC,
C_ETC, C_ETC, C_ETC, C_PLUS, C_COMMA, C_MINUS, C_POINT, C_SLASH,
C_ZERO, C_DIGIT, C_DIGIT, C_DIGIT, C_DIGIT, C_DIGIT, C_DIGIT, C_DIGIT,
C_DIGIT, C_DIGIT, C_COLON, C_ETC, C_ETC, C_ETC, C_ETC, C_ETC,
C_ETC, C_ABCDF, C_ABCDF, C_ABCDF, C_ABCDF, C_E, C_ABCDF, C_ETC,
C_ETC, C_ETC, C_ETC, C_ETC, C_ETC, C_ETC, C_ETC, C_ETC,
C_ETC, C_ETC, C_ETC, C_ETC, C_ETC, C_ETC, C_ETC, C_ETC,
C_ETC, C_ETC, C_ETC, C_LSQRB, C_BACKS, C_RSQRB, C_ETC, C_ETC,
C_ETC, C_LOW_A, C_LOW_B, C_LOW_C, C_LOW_D, C_LOW_E, C_LOW_F, C_ETC,
C_ETC, C_ETC, C_ETC, C_ETC, C_LOW_L, C_ETC, C_LOW_N, C_ETC,
C_ETC, C_ETC, C_LOW_R, C_LOW_S, C_LOW_T, C_LOW_U, C_ETC, C_ETC,
C_ETC, C_ETC, C_ETC, C_LCURB, C_ETC, C_RCURB, C_ETC, C_ETC
};
/**
* The state codes.
*/
private static final int GO = 0; // start
private static final int OK = 1; // ok
private static final int OB = 2; // object
private static final int KE = 3; // key
private static final int CO = 4; // colon
private static final int VA = 5; // value
private static final int AR = 6; // array
private static final int ST = 7; // string
private static final int ES = 8; // escape
private static final int U1 = 9; // u1
private static final int U2 = 10; // u2
private static final int U3 = 11; // u3
private static final int U4 = 12; // u4
private static final int MI = 13; // minus
private static final int ZE = 14; // zero
private static final int IN = 15; // integer
private static final int FR = 16; // fraction
private static final int E1 = 17; // e
private static final int E2 = 18; // ex
private static final int E3 = 19; // exp
private static final int T1 = 20; // tr
private static final int T2 = 21; // tru
private static final int T3 = 22; // true
private static final int F1 = 23; // fa
private static final int F2 = 24; // fal
private static final int F3 = 25; // fals
private static final int F4 = 26; // false
private static final int N1 = 27; // nu
private static final int N2 = 28; // nul
private static final int N3 = 29; // null
/**
* The state transition table takes the current state and the current symbol,
* and returns either a new state or an action. An action is represented as a
* negative number. A JSON text is accepted if at the end of the text the
* state is OK and if the mode is MODE_DONE.
*/
private static int[][] state_transition_table = {
/* white 1-9 ABCDF etc
space | { } [ ] : , " \ / + - . 0 | a b c d e f l n r s t u | E |*/
/*start GO*/ {GO,GO,-6,__,-5,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__},
/*ok OK*/ {OK,OK,__,-8,__,-7,__,-3,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__},
/*object OB*/ {OB,OB,__,-9,__,__,__,__,ST,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__},
/*key KE*/ {KE,KE,__,__,__,__,__,__,ST,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__},
/*colon CO*/ {CO,CO,__,__,__,__,-2,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__},
/*value VA*/ {VA,VA,-6,__,-5,__,__,__,ST,__,__,__,MI,__,ZE,IN,__,__,__,__,__,F1,__,N1,__,__,T1,__,__,__,__},
/*array AR*/ {AR,AR,-6,__,-5,-7,__,__,ST,__,__,__,MI,__,ZE,IN,__,__,__,__,__,F1,__,N1,__,__,T1,__,__,__,__},
/*string ST*/ {ST,__,ST,ST,ST,ST,ST,ST,-4,ES,ST,ST,ST,ST,ST,ST,ST,ST,ST,ST,ST,ST,ST,ST,ST,ST,ST,ST,ST,ST,ST},
/*escape ES*/ {__,__,__,__,__,__,__,__,ST,ST,ST,__,__,__,__,__,__,ST,__,__,__,ST,__,ST,ST,__,ST,U1,__,__,__},
/*u1 U1*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,U2,U2,U2,U2,U2,U2,U2,U2,__,__,__,__,__,__,U2,U2,__},
/*u2 U2*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,U3,U3,U3,U3,U3,U3,U3,U3,__,__,__,__,__,__,U3,U3,__},
/*u3 U3*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,U4,U4,U4,U4,U4,U4,U4,U4,__,__,__,__,__,__,U4,U4,__},
/*u4 U4*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,ST,ST,ST,ST,ST,ST,ST,ST,__,__,__,__,__,__,ST,ST,__},
/*minus MI*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,ZE,IN,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__},
/*zero ZE*/ {OK,OK,__,-8,__,-7,__,-3,__,__,__,__,__,FR,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__},
/*int IN*/ {OK,OK,__,-8,__,-7,__,-3,__,__,__,__,__,FR,IN,IN,__,__,__,__,E1,__,__,__,__,__,__,__,__,E1,__},
/*frac FR*/ {OK,OK,__,-8,__,-7,__,-3,__,__,__,__,__,__,FR,FR,__,__,__,__,E1,__,__,__,__,__,__,__,__,E1,__},
/*e E1*/ {__,__,__,__,__,__,__,__,__,__,__,E2,E2,__,E3,E3,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__},
/*ex E2*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,E3,E3,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__},
/*exp E3*/ {OK,OK,__,-8,__,-7,__,-3,__,__,__,__,__,__,E3,E3,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__},
/*tr T1*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,T2,__,__,__,__,__,__},
/*tru T2*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,T3,__,__,__},
/*true T3*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,OK,__,__,__,__,__,__,__,__,__,__},
/*fa F1*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,F2,__,__,__,__,__,__,__,__,__,__,__,__,__,__},
/*fal F2*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,F3,__,__,__,__,__,__,__,__},
/*fals F3*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,F4,__,__,__,__,__},
/*false F4*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,OK,__,__,__,__,__,__,__,__,__,__},
/*nu N1*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,N2,__,__,__},
/*nul N2*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,N3,__,__,__,__,__,__,__,__},
/*null N3*/ {__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,__,OK,__,__,__,__,__,__,__,__},
};
/**
* These modes can be pushed on the stack.
*/
private static final int MODE_ARRAY = 0;
private static final int MODE_DONE = 1;
private static final int MODE_KEY = 2;
private static final int MODE_OBJECT = 3;
/**
* The stack containing the current modes
*/
private int[] stack;
/**
* The top of the stack (-1 if the stack is empty)
*/
private int top = -1;
/**
* The maximum number of modes on the stack
*/
private int depth = 2048;
/**
* The current state
*/
private int state;
/**
* Collects all characters if the current state is ST (String),
* IN (Integer), FR (Fraction) or the like
*/
private StringBuilder currentValue;
/**
* The number of characters processed by the JSON parser
* @since 1.1.0
*/
private int parsedCharacterCount = 0;
/**
* The feeder is used to get input to parse
*/
private final JsonFeeder feeder;
/**
* The first event returned by {@link #parse(char)}
*/
private int event1 = JsonEvent.NEED_MORE_INPUT;
/**
* The second event returned by {@link #parse(char)}
*/
private int event2 = JsonEvent.NEED_MORE_INPUT;
/**
* Push a mode onto the stack
* @param mode the mode to push
* @return false if there is overflow
*/
private boolean push(int mode) {
++top;
if (top >= stack.length) {
if (top >= depth) {
return false;
}
stack = Arrays.copyOf(stack, Math.min(stack.length * 2, depth));
}
stack[top] = mode;
return true;
}
/**
* Pop the stack, assuring that the current mode matches the expectation
* @param mode the expected mode
* @return false if there is underflow or if the modes mismatch
*/
private boolean pop(int mode) {
if (top < 0 || stack[top] != mode) {
return false;
}
--top;
return true;
}
/**
* Constructs a JSON parser that uses the UTF-8 charset to decode input data
*/
public JsonParser() {
this(StandardCharsets.UTF_8);
}
/**
* Constructs a JSON parser
* @param charset the charset that should be used to decode the
* parser's input data
*/
public JsonParser(Charset charset) {
this(new DefaultJsonFeeder(charset));
}
/**
* Constructs the JSON parser
* @param feeder the feeder that will provide the parser with input data
*/
public JsonParser(JsonFeeder feeder) {
stack = new int[16];
top = -1;
state = GO;
push(MODE_DONE);
this.feeder = feeder;
}
/**
* Set the maximum number of modes on the stack (basically the maximum number
* of nested objects/arrays in the JSON text to parse)
* @param depth the maximum number of modes
*/
public void setMaxDepth(int depth) {
this.depth = depth;
}
/**
* @return the maximum number of modes on the stack (basically the maximum
* number of nested objects/arrays in the JSON text to parse)
*/
public int getMaxDepth() {
return depth;
}
/**
* Call this method to proceed parsing the JSON text and to get the next
* event. The method returns {@link JsonEvent#NEED_MORE_INPUT} if it needs
* more input data from the parser's feeder.
* @return the next JSON event or {@link JsonEvent#NEED_MORE_INPUT} if more
* input is needed
*/
public int nextEvent() {
try {
while (event1 == JsonEvent.NEED_MORE_INPUT) {
if (!feeder.hasInput()) {
if (feeder.isDone()) {
return (state == OK && pop(MODE_DONE) ? JsonEvent.EOF : JsonEvent.ERROR);
}
return JsonEvent.NEED_MORE_INPUT;
}
parse(feeder.nextInput());
}
} catch (CharacterCodingException e) {
return JsonEvent.ERROR;
}
int r = event1;
if (event1 != JsonEvent.ERROR) {
event1 = event2;
event2 = JsonEvent.NEED_MORE_INPUT;
}
return r;
}
/**
* Get the feeder that can be used to provide more input to the parser
* @return the parser's feeder
*/
public JsonFeeder getFeeder() {
return feeder;
}
/**
* This function is called for each character (or partial character) in the
* JSON text. It can accept UTF-8, UTF-16, or UTF-32. It will set
* {@link #event1} and {@link #event2} accordingly. As a precondition these
* fields should have a value of {@link JsonEvent#NEED_MORE_INPUT}.
* @param nextChar the character to parse
*/
private void parse(char nextChar) {
parsedCharacterCount++;
// Determine the character's class.
int nextClass;
if (nextChar >= 128) {
nextClass = C_ETC;
} else {
nextClass = ascii_class[nextChar];
if (nextClass <= __) {
event1 = JsonEvent.ERROR;
return;
}
}
// Get the next state from the state transition table.
int nextState = state_transition_table[state][nextClass];
if (nextState >= 0) {
if (nextState >= ST && nextState <= E3) {
if (state < ST || state > E3) {
currentValue = new StringBuilder();
if (nextState != ST) {
currentValue.append(nextChar);
}
} else {
currentValue.append(nextChar);
}
} else if (nextState == OK) {
// end of token identified, convert state to result
event1 = stateToEvent();
}
// Change the state.
state = nextState;
} else {
// Or perform one of the actions.
switch (nextState) {
// empty }
case -9:
if (!pop(MODE_KEY)) {
event1 = JsonEvent.ERROR;
return;
}
state = OK;
event1 = JsonEvent.END_OBJECT;
break;
// }
case -8:
if (!pop(MODE_OBJECT)) {
event1 = JsonEvent.ERROR;
return;
}
state = OK;
event1 = JsonEvent.END_OBJECT;
break;
// ]
case -7:
if (!pop(MODE_ARRAY)) {
event1 = JsonEvent.ERROR;
return;
}
event1 = stateToEvent();
if (event1 == JsonEvent.NEED_MORE_INPUT) {
event1 = JsonEvent.END_ARRAY;
} else {
event2 = JsonEvent.END_ARRAY;
}
state = OK;
break;
// {
case -6:
if (!push(MODE_KEY)) {
event1 = JsonEvent.ERROR;
return;
}
state = OB;
event1 = JsonEvent.START_OBJECT;
break;
// [
case -5:
if (!push(MODE_ARRAY)) {
event1 = JsonEvent.ERROR;
return;
}
state = AR;
event1 = JsonEvent.START_ARRAY;
break;
// "
case -4:
switch (stack[top]) {
case MODE_KEY:
state = CO;
event1 = JsonEvent.FIELD_NAME;
break;
case MODE_ARRAY:
case MODE_OBJECT:
state = OK;
event1 = JsonEvent.VALUE_STRING;
break;
default:
event1 = JsonEvent.ERROR;
return;
}
break;
// ,
case -3:
switch (stack[top]) {
case MODE_OBJECT:
// A comma causes a flip from object mode to key mode.
if (!pop(MODE_OBJECT) || !push(MODE_KEY)) {
event1 = JsonEvent.ERROR;
return;
}
event1 = stateToEvent();
state = KE;
break;
case MODE_ARRAY:
event1 = stateToEvent();
state = VA;
break;
default:
event1 = JsonEvent.ERROR;
return;
}
break;
// :
case -2:
// A colon causes a flip from key mode to object mode.
if (!pop(MODE_KEY) || !push(MODE_OBJECT)) {
event1 = JsonEvent.ERROR;
return;
}
state = VA;
break;
// Bad action.
default:
event1 = JsonEvent.ERROR;
return;
}
}
}
/**
* Converts the current parser state to a JSON event
* @return the JSON event or {@link JsonEvent#NEED_MORE_INPUT} if the
* current state does not produce a JSON event
*/
private int stateToEvent() {
if (state == IN || state == ZE) {
return JsonEvent.VALUE_INT;
} else if (state == FR || state == E1 || state == E2 || state == E3) {
return JsonEvent.VALUE_DOUBLE;
} else if (state == T3) {
return JsonEvent.VALUE_TRUE;
} else if (state == F4) {
return JsonEvent.VALUE_FALSE;
} else if (state == N3) {
return JsonEvent.VALUE_NULL;
}
return JsonEvent.NEED_MORE_INPUT;
}
/**
* If the event returned by {@link #nextEvent()} was
* {@link JsonEvent#VALUE_STRING} this method will return the parsed string
* @return the parsed string
*/
public String getCurrentString() {
return currentValue.toString();
}
/**
* If the event returned by {@link #nextEvent()} was
* {@link JsonEvent#VALUE_INT} this method will return the parsed integer
* @return the parsed integer
*/
public int getCurrentInt() {
return Integer.parseInt(currentValue.toString());
}
/**
* If the event returned by {@link #nextEvent()} was
* {@link JsonEvent#VALUE_DOUBLE} this method will return the parsed double
* @return the parsed double
*/
public double getCurrentDouble() {
return Double.parseDouble(currentValue.toString());
}
/**
* <p>Get the number of characters processed by the JSON parser so far.</p>
* <p>Use this method to get the location of an event returned by
* {@link #nextEvent()}. Note that the character count is always greater than
* the actual position of the event in the parsed JSON text. For example, if
* {@link #nextEvent()} returns {@link JsonEvent#START_OBJECT} and the
* character count is <code>n</code>, the location of the <code>{</code>
* character is <code>n-1</code>. If {@link #nextEvent()} returns
* {@link JsonEvent#FIELD_NAME} and the parsed field name is
* <code>"id"</code>, the location is <code>n-4</code> because the field
* name is 4 characters long (including the quotes) and the parser has
* already processed all characters of it.</p>
* @return the character offset
* @since 1.1.0
*/
public int getParsedCharacterCount() {
return parsedCharacterCount;
}
}
| Improve performance by reusing the internal string builder
| src/main/java/de/undercouch/actson/JsonParser.java | Improve performance by reusing the internal string builder | <ide><path>rc/main/java/de/undercouch/actson/JsonParser.java
<ide> * Collects all characters if the current state is ST (String),
<ide> * IN (Integer), FR (Fraction) or the like
<ide> */
<del> private StringBuilder currentValue;
<add> private StringBuilder currentValue = new StringBuilder(128);
<ide>
<ide> /**
<ide> * The number of characters processed by the JSON parser
<ide> if (nextState >= 0) {
<ide> if (nextState >= ST && nextState <= E3) {
<ide> if (state < ST || state > E3) {
<del> currentValue = new StringBuilder();
<add> currentValue.setLength(0);
<ide> if (nextState != ST) {
<ide> currentValue.append(nextChar);
<ide> } |
|
Java | epl-1.0 | error: pathspec 'cda/plugins/org.openhealthtools.mdht.uml.cda.resources/src/org/openhealthtools/mdht/uml/cda/resources/util/ICDAProfileConstants.java' did not match any file(s) known to git
| b199d718065cb64a64034cef1ce19a045e49eddd | 1 | vadimnehta/mdht,mdht/mdht,sarpkayanehta/mdht,drbgfc/mdht,drbgfc/mdht,drbgfc/mdht,vadimnehta/mdht,drbgfc/mdht,mdht/mdht,vadimnehta/mdht,sarpkayanehta/mdht,sarpkayanehta/mdht,sarpkayanehta/mdht,vadimnehta/mdht,drbgfc/mdht,sarpkayanehta/mdht,vadimnehta/mdht,mdht/mdht,sarpkayanehta/mdht,vadimnehta/mdht,mdht/mdht,drbgfc/mdht,mdht/mdht | package org.openhealthtools.mdht.uml.cda.resources.util;
public interface ICDAProfileConstants {
public static final String CDA_PROFILE_NAME = "CDA";
/*
* Validation Support Stereotype and Properties
*/
public static final String VALIDATION_SUPPORT = "ValidationSupport";
public static final String VALIDATION_SUPPORT_MESSAGE = "message";
public static final String VALIDATION_SUPPORT_SEVERITY = "severity";
} | cda/plugins/org.openhealthtools.mdht.uml.cda.resources/src/org/openhealthtools/mdht/uml/cda/resources/util/ICDAProfileConstants.java | ICDAProfileConstants was created to be consistent with IHDFProfileConstants. | cda/plugins/org.openhealthtools.mdht.uml.cda.resources/src/org/openhealthtools/mdht/uml/cda/resources/util/ICDAProfileConstants.java | ICDAProfileConstants was created to be consistent with IHDFProfileConstants. | <ide><path>da/plugins/org.openhealthtools.mdht.uml.cda.resources/src/org/openhealthtools/mdht/uml/cda/resources/util/ICDAProfileConstants.java
<add>package org.openhealthtools.mdht.uml.cda.resources.util;
<add>
<add>public interface ICDAProfileConstants {
<add>
<add> public static final String CDA_PROFILE_NAME = "CDA";
<add>
<add> /*
<add> * Validation Support Stereotype and Properties
<add> */
<add> public static final String VALIDATION_SUPPORT = "ValidationSupport";
<add> public static final String VALIDATION_SUPPORT_MESSAGE = "message";
<add> public static final String VALIDATION_SUPPORT_SEVERITY = "severity";
<add>} |
|
JavaScript | mit | 68fc9c293e3167f383b31a39b787cdf262157339 | 0 | ijse/FED,dYb/FED | #!/usr/bin/env node
var Serve2 = require('serve2');
var program = Serve2.cli;
var Freemarker = require('freemarker.js');
program
.option(' --view-root <viewRoot>', 'freemarker templates root folder')
program.parse(process.argv);
var server = new Serve2(program, function(notify) {
// Since `serv` havn't returned
var serv = this;
// load middlewares
notify.on('beforeMock', function(app, opts) {
    if(!program.viewRoot) return ;
    var viewRoot = join(serv.path, program.viewRoot);
var freemarker = new Freemarker({
viewRoot: viewRoot
});
app.use(function(req, res, next) {
res.render = (function(view, data, done) {
var _this = this;
freemarker.render(view, data, function(err, data, out) {
_this.end(data||out);
done && done(err, data||out);
});
}).bind(res);
next();
});
app.render = freemarker.render;
});
});
server.start(function() {
console.log('\033[90mserving \033[36m%s\033[90m on port \033[96m%d\033[0m', server.path, server.port);
});
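// For illustration, a hypothetical mock route could then render a template
// through the res.render wrapper installed above:
//
//   app.get('/user', function(req, res) {
//     res.render('user.ftl', { id: 1 }, function(err, out) {
//       if (err) console.error(err);
//     });
//   });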
| bin/fed-server.js |
// // fed server
// program
// .command('server')
// .description('Launch local http service with serve2')
// .allowUnknownOption()
// .option('-h, --help', 'show help', serve2.showHelp)
// .option(' --view-root <viewRoot>', 'freemarker templates root folder')
// .action(serve2.start.bind(null, function() {
// console.log('Server start at port %s in dir %s. ', this.port, this.path)
// }));
var Serve2 = require('serve2');
var program = Serve2.cli;
var Freemarker = require('freemarker.js');
var join = require('path').join;
program
.option(' --view-root <viewRoot>', 'freemarker templates root folder')
program.parse(process.argv);
var server = new Serve2(program, function(notify) {
// Since `serv` havn't returned
var serv = this;
// load middlewares
notify.on('beforeMock', function(app, opts) {
    if(!program.viewRoot) return ;
    var viewRoot = join(serv.path, program.viewRoot);
var freemarker = new Freemarker({
viewRoot: viewRoot
});
app.use(function(req, res, next) {
res.render = (function(view, data, done) {
var _this = this;
freemarker.render(view, data, function(err, data, out) {
_this.end(data||out);
done && done(err, data||out);
});
}).bind(res);
next();
});
app.render = freemarker.render;
});
});
server.start(function() {
console.log('\033[90mserving \033[36m%s\033[90m on port \033[96m%d\033[0m', server.path, server.port);
});
| fix js file env
| bin/fed-server.js | fix js file env | <ide><path>in/fed-server.js
<del>
<del>// // fed server
<del>// program
<del>// .command('server')
<del>// .description('Launch local http service with serve2')
<del>// .allowUnknownOption()
<del>// .option('-h, --help', 'show help', serve2.showHelp)
<del>// .option(' --view-root <viewRoot>', 'freemarker templates root folder')
<del>// .action(serve2.start.bind(null, function() {
<del>// console.log('Server start at port %s in dir %s. ', this.port, this.path)
<del>// }));
<add>#!/usr/bin/env node
<ide>
<ide> var Serve2 = require('serve2');
<ide> var program = Serve2.cli; |
|
Java | mit | 48207c005c597424bc1516b5574e97e19618c89d | 0 | Bernardo-MG/maven-site-fixer | /**
* The MIT License (MIT)
* <p>
* Copyright (c) 2015-2017 the original author or authors.
* <p>
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* <p>
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
* <p>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.wandrell.velocity.tool;
import static com.google.common.base.Preconditions.checkNotNull;
import java.util.Collection;
import org.apache.velocity.tools.config.DefaultKey;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Element;
import org.jsoup.parser.Tag;
/**
 * Utilities class for upgrading Velocity's XHTML code to HTML5.
* <p>
* This was created for Maven Sites, which are built through Doxia. This
* supports XHTML, and not HTML5, which has the effect making such pages, by
* default, outdated.
* <p>
* The various methods contained in this class aim to fix this problem, and will
* transform several known errors into valid HTML5.
* <p>
* The class makes use of <a href="http://jsoup.org/">jsoup</a> for querying and
* editing. This library will process the HTML code received by the methods, so
* only the contents of the {@code <body>} tag (or the full HTML if this tag is
* missing) will be used.
* <p>
* Take into account that while the returned HTML will be correct, the validity
* of the received HTML won't be checked. That falls fully on the hands of the
* user.
*
* @author Bernardo Martínez Garrido
*/
@DefaultKey("html5UpdateTool")
public class Html5UpdateUtils {
/**
* Constructs an instance of the {@code HTML5UpdateUtils}.
*/
public Html5UpdateUtils() {
super();
}
/**
* Returns the result from fixing internal links, and ids, which are using
     * point separators from the received HTML code. This fix consists just of
* removing said points.
* <p>
* Some internal links on Doxia sites use points on the anchor ids, and this
* stops such links from working correctly.
* <p>
* This method will transform any id, or internal href, such as
* "id.with.points" to "idwithpoints".
*
* @param html
     *            HTML where points are to be removed from internal links and ids
* @return HTML content, with the points removed from internal links and ids
*/
public final String fixInternalLinks(final String html) {
final Element body; // Body of the HTML code
checkNotNull(html, "Received a null pointer as html");
body = Jsoup.parse(html).body();
removePointsFromIds(body);
removePointsFromInternalHref(body);
return body.html();
}
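    // For illustration, a hypothetical input/output pair for this method:
    //
    //   fixInternalLinks("<a href=\"#id.with.points\">x</a>")
    //   // yields: <a href="#idwithpoints">x</a>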
/**
* Returns the result from removing the {@code externalLink} class from
* links from the received HTML code.
* <p>
* These are used by Doxia but are meaningless.
* <p>
     * If after removing the class any link ends without classes, then the
* {@code class} attribute will be removed too.
*
* @param html
* HTML where the {@code externalLink} class is to be removed
* @return HTML content with the {@code externalLink} class removed
*/
public final String removeExternalLinks(final String html) {
final Element body; // Body of the HTML code
checkNotNull(html, "Received a null pointer as html");
body = Jsoup.parse(html).body();
// <a> elements with the externalLink class
removeClass(body, "a.externalLink", "externalLink");
return body.html();
}
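    // For illustration, a hypothetical input/output pair for this method:
    //
    //   removeExternalLinks("<a class=\"externalLink\" href=\"http://a.b\">x</a>")
    //   // yields: <a href="http://a.b">x</a>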
/**
* Returns the result from removing links with no {@code href} attribute
* from the received HTML code.
* <p>
* These links are added by Doxia mainly to the headings. The idea seems to
     * be to allow getting an internal anchor by clicking on a heading, but it does
* not work correctly on all skins, or maybe it is just missing something,
* making it invalid HTML code.
*
* @param html
* HTML where links with no {@code href} attribute are to be
* removed
* @return HTML content, with no link missing the {@code href} attribute
*/
public final String removeNoHrefLinks(final String html) {
final Iterable<Element> links; // Links to fix
final Element body; // Body of the HTML code
checkNotNull(html, "Received a null pointer as html");
body = Jsoup.parse(html).body();
// Links missing the href attribute
links = body.select("a:not([href])");
for (final Element link : links) {
link.unwrap();
}
return body.html();
}
/**
* Returns the result from updating and correcting source divisions on the
* received HTML code.
* <p>
     * Outdated source divisions, which look like {@code
     * <div class="source">}, are transformed to the new {@code <code>} elements.
     * Additionally, it will correct the position of the {@code pre} element,
     * which will be moved out of the code section.
* <p>
* It also fixes a Doxia error where the source division is wrapped by a
* second source division.
*
* @param html
* HTML where the source sections are to be updated
* @return HTML content, with the source sections updated
*/
public final String updateCodeSections(final String html) {
final Element body; // Body of the HTML code
checkNotNull(html, "Received a null pointer as html");
body = Jsoup.parse(html).body();
removeRedundantSourceDivs(body);
takeOutSourceDivPre(body);
updateSourceDivsToCode(body);
return body.html();
}
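    // For illustration, a hypothetical input/output pair for this method:
    //
    //   updateCodeSections("<div class=\"source\"><pre>int i;</pre></div>")
    //   // yields: <pre><code>int i;</code></pre>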
/**
* Returns the result from updating section divisions, such as {@code
* <div class="section">}, to the new {@code <section>} element on the
* received HTML code.
*
* @param html
* HTML where the section divisions are to be updated
* @return HTML content, with the section divisions updated
*/
public final String updateSectionDiv(final String html) {
final Element body; // Body of the HTML code
checkNotNull(html, "Received a null pointer as html");
body = Jsoup.parse(html).body();
// divs with the section class
retag(body, "div.section", "section", "section");
return body.html();
}
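    // For illustration, a hypothetical input/output pair for this method:
    //
    //   updateSectionDiv("<div class=\"section\"><h2>About</h2></div>")
    //   // yields: <section><h2>About</h2></section>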
/**
* Returns the result from updating the tables, by applying various fixes
* and removing unneeded code, on the received HTML code.
* <p>
     * This method will add the missing {@code <thead>} element to tables, remove
* the unneeded border attribute and the {@code bodyTable} class.
* <p>
     * It also removes the alternating row attributes, which mark the rows with the
* {@code a} and {@code b} classes. This seems to be an outdated method to
* get alternating colored rows.
*
* @param html
* HTML with tables to update
* @return HTML content, with the tables updated
*/
public final String updateTables(final String html) {
final Element body; // Body of the HTML code
checkNotNull(html, "Received a null pointer as html");
body = Jsoup.parse(html).body();
removeTableBodyClass(body);
updateTableHeads(body);
removeTableBorder(body);
updateTableRowAlternates(body);
return body.html();
}
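    // For illustration, given a hypothetical Doxia table such as
    //
    //   <table border="0" class="bodyTable"><tbody>
    //     <tr class="a"><th>Name</th></tr><tr class="b"><td>x</td></tr>
    //   </tbody></table>
    //
    // this method would yield
    //
    //   <table><thead><tr><th>Name</th></tr></thead>
    //   <tbody><tr><td>x</td></tr></tbody></table>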
/**
* Finds a set of elements through a CSS selector and removes the received
* class from them.
* <p>
* If the elements end without classes then the class attribute is also
* removed.
*
* @param body
* body where the elements will be searched for
* @param select
* CSS selector for the elements
* @param className
* class to remove
*/
private final void removeClass(final Element body, final String select,
final String className) {
final Iterable<Element> elements; // Elements selected
        // Elements matching the received selector
elements = body.select(select);
for (final Element element : elements) {
element.removeClass(className);
if (element.classNames().isEmpty()) {
element.removeAttr("class");
}
}
}
/**
* Removes the points from the contents of the specified attribute.
*
* @param element
* element with the attribute to clean
* @param attr
* attribute to clean
*/
private final void removePointsFromAttr(final Element element,
final String attr) {
final String value; // Content of the attribute
value = element.attr(attr).replaceAll("\\.", "");
element.attr(attr, value);
}
/**
* Removes the points from the contents of the specified attribute.
*
* @param body
* body element with attributes to fix
* @param selector
* CSS selector for the elements
* @param attr
* attribute to clean
*/
private final void removePointsFromAttr(final Element body,
final String selector, final String attr) {
final Iterable<Element> elements; // Elements to fix
// Elements with the id attribute
elements = body.select(selector);
for (final Element element : elements) {
removePointsFromAttr(element, attr);
}
}
/**
* Removes points from the {@code id} attributes.
*
* @param body
* body element with ids to fix
*/
private final void removePointsFromIds(final Element body) {
removePointsFromAttr(body, "[id]", "id");
}
/**
* Removes points from the {@code href} attributes, if these are using
* internal anchors.
*
* @param body
* body element with links to fix
*/
private final void removePointsFromInternalHref(final Element body) {
removePointsFromAttr(body, "[href^=\"#\"]", "href");
}
/**
* Removes redundant source divisions. This serves as a cleanup step before
* updating the code sections.
* <p>
* Sites created with Doxia for some reason wrap a source code division with
* another source code division, and this needs to be fixed before applying
* other fixes to such divisions.
* <p>
* Due to the way this method works, if those divisions were to have more
     * than one code division, those additional elements will be lost.
*
* @param body
* body element with source divisions to fix
*/
private final void removeRedundantSourceDivs(final Element body) {
final Iterable<Element> sourceDivs; // Repeated source divs
Element parent; // Parent <div>
// Divs with the source class with another div with the source class as
// a child
sourceDivs = body.select("div.source > div.source");
for (final Element div : sourceDivs) {
parent = div.parent();
div.remove();
parent.replaceWith(div);
}
}
/**
     * Removes the {@code bodyTable} class from tables.
* <p>
* If the table ends without classes, then the {@code class} attribute is
* removed.
*
* @param body
* body element with tables to fix
*/
private final void removeTableBodyClass(final Element body) {
removeClass(body, "table.bodyTable", "bodyTable");
}
/**
     * Removes the {@code border} attribute from {@code <table>} elements.
* <p>
* This attribute, which should be defined in CSS files, is added by Doxia
* to tables.
*
* @param body
* body element with tables to fix
*/
private final void removeTableBorder(final Element body) {
final Iterable<Element> tables; // Tables to fix
// Selects tables with border defined
tables = body.select("table[border]");
for (final Element table : tables) {
table.removeAttr("border");
}
}
/**
* Finds a set of elements through a CSS selector and changes their tags,
     * and also removes the received class from them.
* <p>
* If the elements end without classes then the class attribute is also
* removed.
*
* @param body
* body where the elements will be searched for
* @param select
* CSS selector for the elements
* @param tag
* new tag for the elements
* @param className
* class to remove
*/
private final void retag(final Element body, final String select,
final String tag, final String className) {
final Iterable<Element> elements; // Elements selected
        // Elements matching the received selector
elements = body.select(select);
for (final Element element : elements) {
element.tagName(tag);
element.removeClass(className);
if (element.classNames().isEmpty()) {
element.removeAttr("class");
}
}
}
/**
* Moves the {@code pre} element out of source divisions, so it wraps said
* division, and not the other way around.
* <p>
     * Note that these source divisions are expected to have only one child
* with the {@code pre} tag.
*
* @param body
* body element with source divisions to upgrade
*/
private final void takeOutSourceDivPre(final Element body) {
final Iterable<Element> divs; // Code divisions
Collection<Element> pres; // Code preservations
Element pre; // <pre> element
String text; // Preserved text
// Divs with the source class and a pre
divs = body.select("div.source:has(pre)");
for (final Element div : divs) {
pres = div.getElementsByTag("pre");
if (!pres.isEmpty()) {
pre = pres.iterator().next();
text = pre.text();
pre.text("");
div.replaceWith(pre);
pre.appendChild(div);
div.text(text);
}
}
}
/**
* Transforms {@code <div>} elements with the {@code source} class into
* {@code <code>} elements.
*
* @param body
     *            body element with source divisions to upgrade
*/
private final void updateSourceDivsToCode(final Element body) {
// Divs with the source class
retag(body, "div.source", "code", "source");
}
/**
* Corrects table headers by adding a {@code <thead>} section where missing.
* <p>
* This serves to fix an error with tables created by Doxia, which will add
     * the header rows into the {@code <tbody>} element, instead of a {@code
* <thead>} element.
*
* @param body
* body element with tables to fix
*/
private final void updateTableHeads(final Element body) {
final Iterable<Element> tableHeadRows; // Heads to fix
Element table; // HTML table
Element thead; // Table's head for wrapping
// Table rows with <th> tags in a <tbody>
tableHeadRows = body.select("table > tbody > tr:has(th)");
for (final Element row : tableHeadRows) {
// Gets the row's table
// The selector ensured the row is inside a tbody
table = row.parent().parent();
// Removes the row from its original position
row.remove();
// Creates a table header element with the row
thead = new Element(Tag.valueOf("thead"), "");
thead.appendChild(row);
// Adds the head at the beginning of the table
table.prependChild(thead);
}
}
/**
* Removes the alternating {@code a} and {@code b} classes from table rows.
* <p>
* This seems to be an obsolete way to get alternate colored rows.
*
* @param body
* body element with tables to fix
*/
private final void updateTableRowAlternates(final Element body) {
// Table rows with the class "a" or "b"
removeClass(body, "tr.a", "a");
removeClass(body, "tr.b", "b");
}
}
| src/main/java/com/wandrell/velocity/tool/Html5UpdateUtils.java | /**
* The MIT License (MIT)
* <p>
* Copyright (c) 2015-2017 the original author or authors.
* <p>
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* <p>
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
* <p>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.wandrell.velocity.tool;
import static com.google.common.base.Preconditions.checkNotNull;
import java.util.Collection;
import org.apache.velocity.tools.config.DefaultKey;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Element;
import org.jsoup.parser.Tag;
/**
 * Utilities class for upgrading Velocity's XHTML code to HTML5.
* <p>
* This was created for Maven Sites, which are built through Doxia. This
* supports XHTML, and not HTML5, which has the effect making such pages, by
* default, outdated.
* <p>
* The various methods contained in this class aim to fix this problem, and will
* transform several known errors into valid HTML5.
* <p>
* The class makes use of <a href="http://jsoup.org/">jsoup</a> for querying and
* editing. This library will process the HTML code received by the methods, so
* only the contents of the {@code <body>} tag (or the full HTML if this tag is
* missing) will be used.
* <p>
* Take into account that while the returned HTML will be correct, the validity
* of the received HTML won't be checked. That falls fully on the hands of the
* user.
*
* @author Bernardo Martínez Garrido
*/
@DefaultKey("html5UpdateTool")
public class Html5UpdateUtils {
/**
* Constructs an instance of the {@code HTML5UpdateUtils}.
*/
public Html5UpdateUtils() {
super();
}
/**
* Returns the result from fixing internal links, and ids, which are using
     * point separators from the received HTML code. This fix consists just of
* removing said points.
* <p>
* Some internal links on Doxia sites use points on the anchor ids, and this
* stops such links from working correctly.
* <p>
* This method will transform any id, or internal href, such as
* "id.with.points" to "idwithpoints".
*
* @param html
     *            HTML where points are to be removed from internal links and ids
* @return HTML content, with the points removed from internal links and ids
*/
public final String fixInternalLinks(final String html) {
final Element body; // Body of the HTML code
checkNotNull(html, "Received a null pointer as html");
body = Jsoup.parse(html).body();
removePointsFromIds(body);
removePointsFromInternalHref(body);
return body.html();
}
/**
* Returns the result from removing the {@code externalLink} class from
* links from the received HTML code.
* <p>
* These are used by Doxia but are meaningless.
* <p>
     * If after removing the class any link ends without classes, then the
* {@code class} attribute will be removed too.
*
* @param html
* HTML where the {@code externalLink} class is to be removed
* @return HTML content with the {@code externalLink} class removed
*/
public final String removeExternalLinks(final String html) {
final Element body; // Body of the HTML code
checkNotNull(html, "Received a null pointer as html");
body = Jsoup.parse(html).body();
// <a> elements with the externalLink class
removeClass(body, "a.externalLink", "externalLink");
return body.html();
}
/**
* Returns the result from removing links with no {@code href} attribute
* from the received HTML code.
* <p>
* These links are added by Doxia mainly to the headings. The idea seems to
     * be to allow getting an internal anchor by clicking on a heading, but it does
* not work correctly on all skins, or maybe it is just missing something,
* making it invalid HTML code.
*
* @param html
* HTML where links with no {@code href} attribute are to be
* removed
* @return HTML content, with no link missing the {@code href} attribute
*/
public final String removeNoHrefLinks(final String html) {
final Iterable<Element> links; // Links to fix
final Element body; // Body of the HTML code
checkNotNull(html, "Received a null pointer as html");
body = Jsoup.parse(html).body();
// Links missing the href attribute
links = body.select("a:not([href])");
for (final Element link : links) {
link.unwrap();
}
return body.html();
}
/**
* Returns the result from updating and correcting source divisions on the
* received HTML code.
* <p>
     * Outdated source divisions, which look like {@code
     * <div class="source">}, are transformed to the new {@code <code>} elements.
     * Additionally, it will correct the position of the {@code pre} element,
     * which will be moved out of the code section.
* <p>
* It also fixes a Doxia error where the source division is wrapped by a
* second source division.
*
* @param html
* HTML where the source sections are to be updated
* @return HTML content, with the source sections updated
*/
public final String updateCodeSections(final String html) {
final Element body; // Body of the HTML code
checkNotNull(html, "Received a null pointer as html");
body = Jsoup.parse(html).body();
removeRedundantSourceDivs(body);
takeOutSourceDivPre(body);
updateSourceDivsToCode(body);
return body.html();
}
/**
* Returns the result from updating section divisions, such as {@code
* <div class="section">}, to the new {@code <section>} element on the
* received HTML code.
*
* @param html
* HTML where the section divisions are to be updated
* @return HTML content, with the section divisions updated
*/
public final String updateSectionDiv(final String html) {
final Element body; // Body of the HTML code
checkNotNull(html, "Received a null pointer as html");
body = Jsoup.parse(html).body();
// divs with the section class
retag(body, "div.section", "section", "section");
return body.html();
}
/**
* Returns the result from updating the tables, by applying various fixes
* and removing unneeded code, on the received HTML code.
* <p>
     * This method will add the missing {@code <thead>} element to tables, remove
* the unneeded border attribute and the {@code bodyTable} class.
* <p>
     * It also removes the alternating row attributes, which mark the rows with the
* {@code a} and {@code b} classes. This seems to be an outdated method to
* get alternating colored rows.
*
* @param html
* HTML with tables to update
* @return HTML content, with the tables updated
*/
public final String updateTables(final String html) {
final Element body; // Body of the HTML code
checkNotNull(html, "Received a null pointer as html");
body = Jsoup.parse(html).body();
removeTableBodyClass(body);
updateTableHeads(body);
removeTableBorder(body);
updateTableRowAlternates(body);
return body.html();
}
/**
* Finds a set of elements through a CSS selector and removes the received
* class from them.
* <p>
* If the elements end without classes then the class attribute is also
* removed.
*
* @param body
* body where the elements will be searched for
* @param select
* CSS selector for the elements
* @param className
* class to remove
*/
private final void removeClass(final Element body, final String select,
final String className) {
final Iterable<Element> elements; // Elements selected
        // Elements matching the received selector
elements = body.select(select);
for (final Element element : elements) {
element.removeClass(className);
if (element.classNames().isEmpty()) {
element.removeAttr("class");
}
}
}
/**
* Removes points from the {@code id} attributes.
*
* @param body
* body element with ids to fix
*/
private final void removePointsFromIds(final Element body) {
final Iterable<Element> elements; // Elements to fix
String id; // id attribute contents
// Elements with the id attribute
elements = body.select("[id]");
for (final Element element : elements) {
id = element.attr("id").replaceAll("\\.", "");
element.attr("id", id);
}
}
/**
* Removes points from the {@code href} attributes, if these are using
* internal anchors.
*
* @param body
* body element with links to fix
*/
private final void removePointsFromInternalHref(final Element body) {
final Iterable<Element> links; // Links to fix
String href; // href attribute contents
// Elements with an internal href
links = body.select("[href^=\"#\"]");
for (final Element element : links) {
href = element.attr("href").replaceAll("\\.", "");
element.attr("href", href);
}
}
/**
* Removes redundant source divisions. This serves as a cleanup step before
* updating the code sections.
* <p>
* Sites created with Doxia for some reason wrap a source code division with
* another source code division, and this needs to be fixed before applying
* other fixes to such divisions.
* <p>
* Due to the way this method works, if those divisions were to have more
     * than one code division, those additional elements will be lost.
*
* @param body
* body element with source divisions to fix
*/
private final void removeRedundantSourceDivs(final Element body) {
final Iterable<Element> sourceDivs; // Repeated source divs
Element parent; // Parent <div>
// Divs with the source class with another div with the source class as
// a child
sourceDivs = body.select("div.source > div.source");
for (final Element div : sourceDivs) {
parent = div.parent();
div.remove();
parent.replaceWith(div);
}
}
/**
     * Removes the {@code bodyTable} class from tables.
* <p>
* If the table ends without classes, then the {@code class} attribute is
* removed.
*
* @param body
* body element with tables to fix
*/
private final void removeTableBodyClass(final Element body) {
removeClass(body, "table.bodyTable", "bodyTable");
}
/**
     * Removes the {@code border} attribute from {@code <table>} elements.
* <p>
* This attribute, which should be defined in CSS files, is added by Doxia
* to tables.
*
* @param body
* body element with tables to fix
*/
private final void removeTableBorder(final Element body) {
final Iterable<Element> tables; // Tables to fix
// Selects tables with border defined
tables = body.select("table[border]");
for (final Element table : tables) {
table.removeAttr("border");
}
}
/**
* Finds a set of elements through a CSS selector and changes their tags,
     * and also removes the received class from them.
* <p>
* If the elements end without classes then the class attribute is also
* removed.
*
* @param body
* body where the elements will be searched for
* @param select
* CSS selector for the elements
* @param tag
* new tag for the elements
* @param className
* class to remove
*/
private final void retag(final Element body, final String select,
final String tag, final String className) {
final Iterable<Element> elements; // Elements selected
        // Elements matching the received selector
elements = body.select(select);
for (final Element element : elements) {
element.tagName(tag);
element.removeClass(className);
if (element.classNames().isEmpty()) {
element.removeAttr("class");
}
}
}
/**
* Moves the {@code pre} element out of source divisions, so it wraps said
* division, and not the other way around.
* <p>
     * Note that these source divisions are expected to have only one child
* with the {@code pre} tag.
*
* @param body
* body element with source divisions to upgrade
*/
private final void takeOutSourceDivPre(final Element body) {
final Iterable<Element> divs; // Code divisions
Collection<Element> pres; // Code preservations
Element pre; // <pre> element
String text; // Preserved text
// Divs with the source class and a pre
divs = body.select("div.source:has(pre)");
for (final Element div : divs) {
pres = div.getElementsByTag("pre");
if (!pres.isEmpty()) {
pre = pres.iterator().next();
text = pre.text();
pre.text("");
div.replaceWith(pre);
pre.appendChild(div);
div.text(text);
}
}
}
/**
* Transforms {@code <div>} elements with the {@code source} class into
* {@code <code>} elements.
*
* @param body
     *            body element with source divisions to upgrade
*/
private final void updateSourceDivsToCode(final Element body) {
// Divs with the source class
retag(body, "div.source", "code", "source");
}
/**
* Corrects table headers by adding a {@code <thead>} section where missing.
* <p>
* This serves to fix an error with tables created by Doxia, which will add
     * the header rows into the {@code <tbody>} element, instead of a {@code
* <thead>} element.
*
* @param body
* body element with tables to fix
*/
private final void updateTableHeads(final Element body) {
final Iterable<Element> tableHeadRows; // Heads to fix
Element table; // HTML table
Element thead; // Table's head for wrapping
// Table rows with <th> tags in a <tbody>
tableHeadRows = body.select("table > tbody > tr:has(th)");
for (final Element row : tableHeadRows) {
// Gets the row's table
// The selector ensured the row is inside a tbody
table = row.parent().parent();
// Removes the row from its original position
row.remove();
// Creates a table header element with the row
thead = new Element(Tag.valueOf("thead"), "");
thead.appendChild(row);
// Adds the head at the beginning of the table
table.prependChild(thead);
}
}
/**
* Removes the alternating {@code a} and {@code b} classes from table rows.
* <p>
* This seems to be an obsolete way to get alternate colored rows.
*
* @param body
* body element with tables to fix
*/
private final void updateTableRowAlternates(final Element body) {
// Table rows with the class "a" or "b"
removeClass(body, "tr.a", "a");
removeClass(body, "tr.b", "b");
}
}
| Reduced code repetition | src/main/java/com/wandrell/velocity/tool/Html5UpdateUtils.java | Reduced code repetition | <ide><path>rc/main/java/com/wandrell/velocity/tool/Html5UpdateUtils.java
<ide> }
<ide>
<ide> /**
<add> * Removes the points from the contents of the specified attribute.
<add> *
<add> * @param element
<add> * element with the attribute to clean
<add> * @param attr
<add> * attribute to clean
<add> */
<add> private final void removePointsFromAttr(final Element element,
<add> final String attr) {
<add> final String value; // Content of the attribute
<add>
<add> value = element.attr(attr).replaceAll("\\.", "");
<add>
<add> element.attr(attr, value);
<add> }
<add>
<add> /**
<add> * Removes the points from the contents of the specified attribute.
<add> *
<add> * @param body
<add> * body element with attributes to fix
<add> * @param selector
<add> * CSS selector for the elements
<add> * @param attr
<add> * attribute to clean
<add> */
<add> private final void removePointsFromAttr(final Element body,
<add> final String selector, final String attr) {
<add> final Iterable<Element> elements; // Elements to fix
<add>
<add> // Elements with the id attribute
<add> elements = body.select(selector);
<add> for (final Element element : elements) {
<add> removePointsFromAttr(element, attr);
<add> }
<add> }
<add>
<add> /**
<ide> * Removes points from the {@code id} attributes.
<ide> *
<ide> * @param body
<ide> * body element with ids to fix
<ide> */
<ide> private final void removePointsFromIds(final Element body) {
<del> final Iterable<Element> elements; // Elements to fix
<del> String id; // id attribute contents
<del>
<del> // Elements with the id attribute
<del> elements = body.select("[id]");
<del> for (final Element element : elements) {
<del> id = element.attr("id").replaceAll("\\.", "");
<del>
<del> element.attr("id", id);
<del> }
<add> removePointsFromAttr(body, "[id]", "id");
<ide> }
<ide>
<ide> /**
<ide> * body element with links to fix
<ide> */
<ide> private final void removePointsFromInternalHref(final Element body) {
<del> final Iterable<Element> links; // Links to fix
<del> String href; // href attribute contents
<del>
<del> // Elements with an internal href
<del> links = body.select("[href^=\"#\"]");
<del> for (final Element element : links) {
<del> href = element.attr("href").replaceAll("\\.", "");
<del>
<del> element.attr("href", href);
<del> }
<add> removePointsFromAttr(body, "[href^=\"#\"]", "href");
<ide> }
<ide>
<ide> /** |
|
Java | agpl-3.0 | bcd619c939adb1d160914e01744b17edfd5d2bff | 0 | deepstupid/sphinx5 | /*
* Copyright 1999-2002 Carnegie Mellon University.
* Portions Copyright 2002 Sun Microsystems, Inc.
* Portions Copyright 2002 Mitsubishi Electric Research Laboratories.
* All Rights Reserved. Use is subject to license terms.
*
* See the file "license.terms" for information on usage and
* redistribution of this file, and for a DISCLAIMER OF ALL
* WARRANTIES.
*
*/
package edu.cmu.sphinx.frontend.util;
import edu.cmu.sphinx.frontend.*;
import edu.cmu.sphinx.util.props.*;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.UnsupportedAudioFileException;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
/**
 * An AudioFileDataSource generates a stream of audio data from a given audio file. All required information concerning
 * the audio format is read directly from the file. One would need to call {@link #setAudioFile(java.io.File,String)}
* to set the input file, and call {@link #getData} to obtain the Data frames.
* <p/>
 * Using JavaSound as backend this class is able to handle all sound files supported by JavaSound. Besides the built-in
 * support for .wav, .au and .aiff, it can be extended using plugins (cf. http://www.jsresources.org/ ) to support
 * .ogg, .mp3, .speex and others.
*
* @author Holger Brandl
*/
public class AudioFileDataSource extends BaseDataProcessor {
/** SphinxProperty for the number of bytes to read from the InputStream each time. */
@S4Integer(defaultValue = 3200)
public static final String PROP_BYTES_PER_READ = "bytesPerRead";
/** Default value for PROP_BYTES_PER_READ. */
public static final int PROP_BYTES_PER_READ_DEFAULT = 3200;
protected InputStream dataStream;
protected int sampleRate;
protected int bytesPerRead;
protected int bytesPerValue;
private long totalValuesRead;
protected boolean bigEndian;
protected boolean signedData;
private boolean streamEndReached = false;
private boolean utteranceEndSent = false;
private boolean utteranceStarted = false;
@S4ComponentList(type = Configurable.class)
public static final String AUDIO_FILE_LISTENERS = "audioFileListners";
protected List<AudioFileProcessListener> fileListeners = new ArrayList<AudioFileProcessListener>();
private File curAudioFile;
public AudioFileDataSource() {
this(PROP_BYTES_PER_READ_DEFAULT);
}
public AudioFileDataSource(int bytesPerRead) {
super();
initialize();
this.bytesPerRead = bytesPerRead;
}
/*
* (non-Javadoc)
*
* @see edu.cmu.sphinx.util.props.Configurable#newProperties(edu.cmu.sphinx.util.props.PropertySheet)
*/
public void newProperties(PropertySheet ps) throws PropertyException {
super.newProperties(ps);
bytesPerRead = ps.getInt(PROP_BYTES_PER_READ);
// attach all pool-listeners
List<? extends Configurable> list = ps.getComponentList(AUDIO_FILE_LISTENERS);
for (Configurable configurable : list) {
assert configurable instanceof AudioFileProcessListener;
addNewFileListener((AudioFileProcessListener) configurable);
}
initialize();
}
/*
* (non-Javadoc)
*
* @see edu.cmu.sphinx.frontend.DataProcessor#initialize(edu.cmu.sphinx.frontend.CommonConfig)
*/
public void initialize() {
super.initialize();
if (bytesPerRead % 2 == 1) {
bytesPerRead++;
}
}
/**
     * Sets the audio file from which the data-stream will be generated.
*
* @param audioFile The location of the audio file to use
     * @param streamName The name of the InputStream. If <code>null</code>, the complete path of the audio file will be
     *                   used as the stream name.
*/
public void setAudioFile(File audioFile, String streamName) {
try {
setAudioFile(audioFile.toURI().toURL(), streamName);
} catch (MalformedURLException e) {
e.printStackTrace();
}
}
/**
     * Sets the audio file from which the data-stream will be generated.
*
* @param audioFileURL The location of the audio file to use
     * @param streamName   The name of the InputStream. If <code>null</code>, the complete path of the audio file will
     *                     be used as the stream name.
*/
public void setAudioFile(URL audioFileURL, String streamName) {
        // first close the last stream if there is one
if (dataStream != null) {
try {
dataStream.close();
} catch (IOException e) {
e.printStackTrace();
}
dataStream = null;
}
assert audioFileURL != null;
        if (streamName == null)
            streamName = audioFileURL.getPath();
AudioInputStream audioStream = null;
try {
audioStream = AudioSystem.getAudioInputStream(audioFileURL);
} catch (UnsupportedAudioFileException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
curAudioFile = new File(audioFileURL.getFile());
for (AudioFileProcessListener fileListener : fileListeners)
fileListener.audioFileProcStarted(curAudioFile);
setInputStream(audioStream, streamName);
}
/**
* Sets the InputStream from which this StreamDataSource reads.
*
* @param inputStream the InputStream from which audio data comes
* @param streamName the name of the InputStream
*/
public void setInputStream(AudioInputStream inputStream, String streamName) {
dataStream = inputStream;
streamEndReached = false;
utteranceEndSent = false;
utteranceStarted = false;
AudioFormat format = inputStream.getFormat();
sampleRate = (int) format.getSampleRate();
bigEndian = format.isBigEndian();
if (format.getSampleSizeInBits() % 8 != 0)
throw new Error("StreamDataSource: bits per sample must be a multiple of 8.");
bytesPerValue = format.getSampleSizeInBits() / 8;
        // test whether all files in the stream have the same format
AudioFormat.Encoding encoding = format.getEncoding();
if (encoding.equals(AudioFormat.Encoding.PCM_SIGNED))
signedData = true;
else if (encoding.equals(AudioFormat.Encoding.PCM_UNSIGNED))
signedData = false;
else
throw new RuntimeException("used file encoding is not supported");
totalValuesRead = 0;
}
/**
     * Reads and returns the next Data from the InputStream of StreamDataSource, returning null if no data is read and the end
* of file is reached.
*
* @return the next Data or <code>null</code> if none is available
* @throws edu.cmu.sphinx.frontend.DataProcessingException
* if there is a data processing error
*/
public Data getData() throws DataProcessingException {
getTimer().start();
Data output = null;
if (streamEndReached) {
if (!utteranceEndSent) {
// since 'firstSampleNumber' starts at 0, the last
// sample number should be 'totalValuesRead - 1'
output = createDataEndSignal();
utteranceEndSent = true;
}
} else {
if (!utteranceStarted) {
utteranceStarted = true;
output = new DataStartSignal(sampleRate);
} else {
if (dataStream != null) {
output = readNextFrame();
if (output == null) {
if (!utteranceEndSent) {
output = createDataEndSignal();
utteranceEndSent = true;
}
}
}
}
}
getTimer().stop();
return output;
}
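    // For illustration, a minimal (hypothetical) pull loop over this source;
    // the file name is assumed and error handling is omitted:
    //
    //   AudioFileDataSource source = new AudioFileDataSource();
    //   source.setAudioFile(new File("speech.wav"), null);
    //   Data d;
    //   while (!((d = source.getData()) instanceof DataEndSignal)) {
    //       // a DataStartSignal first, then DoubleData frames
    //   }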
private DataEndSignal createDataEndSignal() {
if (!(this instanceof ConcatAudioFileDataSource))
for (AudioFileProcessListener fileListener : fileListeners)
fileListener.audioFileProcFinished(curAudioFile);
return new DataEndSignal(getDuration());
}
/**
* Returns the next Data from the input stream, or null if there is none available
*
* @return a Data or null
     * @throws DataProcessingException if there is an error reading the data
*/
private Data readNextFrame() throws DataProcessingException {
// read one frame's worth of bytes
int read;
int totalRead = 0;
final int bytesToRead = bytesPerRead;
byte[] samplesBuffer = new byte[bytesPerRead];
long collectTime = System.currentTimeMillis();
long firstSample = totalValuesRead;
try {
do {
read = dataStream.read(samplesBuffer, totalRead, bytesToRead
- totalRead);
if (read > 0) {
totalRead += read;
}
} while (read != -1 && totalRead < bytesToRead);
if (totalRead <= 0) {
closeDataStream();
return null;
}
// shrink incomplete frames
totalValuesRead += (totalRead / bytesPerValue);
if (totalRead < bytesToRead) {
totalRead = (totalRead % 2 == 0)
? totalRead + 2
: totalRead + 3;
byte[] shrinkedBuffer = new byte[totalRead];
System
.arraycopy(samplesBuffer, 0, shrinkedBuffer, 0,
totalRead);
samplesBuffer = shrinkedBuffer;
closeDataStream();
}
} catch (IOException ioe) {
ioe.printStackTrace();
throw new DataProcessingException("Error reading data");
}
// turn it into an Data object
double[] doubleData;
if (bigEndian) {
doubleData = DataUtil.bytesToValues(samplesBuffer, 0, totalRead, bytesPerValue, signedData);
} else {
doubleData = DataUtil.littleEndianBytesToValues(samplesBuffer, 0, totalRead, bytesPerValue, signedData);
}
return new DoubleData(doubleData, sampleRate, collectTime, firstSample);
}
private void closeDataStream() throws IOException {
streamEndReached = true;
if (dataStream != null) {
dataStream.close();
}
}
/**
* Returns the duration of the current data stream in milliseconds.
*
* @return the duration of the current data stream in milliseconds
*/
private long getDuration() {
return (long) (((double) totalValuesRead / (double) sampleRate) * 1000.0);
}
public int getSampleRate() {
return sampleRate;
}
public boolean isBigEndian() {
return bigEndian;
}
/** Adds a new listener for new file events. */
public void addNewFileListener(AudioFileProcessListener l) {
if (l == null)
return;
fileListeners.add(l);
}
/** Removes a listener for new file events. */
public void removeNewFileListener(AudioFileProcessListener l) {
if (l == null)
return;
fileListeners.remove(l);
}
}
| src/sphinx4/edu/cmu/sphinx/frontend/util/AudioFileDataSource.java | /*
* Copyright 1999-2002 Carnegie Mellon University.
* Portions Copyright 2002 Sun Microsystems, Inc.
* Portions Copyright 2002 Mitsubishi Electric Research Laboratories.
* All Rights Reserved. Use is subject to license terms.
*
* See the file "license.terms" for information on usage and
* redistribution of this file, and for a DISCLAIMER OF ALL
* WARRANTIES.
*
*/
package edu.cmu.sphinx.frontend.util;
import edu.cmu.sphinx.frontend.*;
import edu.cmu.sphinx.util.props.*;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.UnsupportedAudioFileException;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
/**
 * An AudioFileDataSource generates a stream of audio data from a given audio file. All required information concerning
 * the audio format is read directly from the file. One would need to call {@link #setAudioFile(java.io.File,String)}
* to set the input file, and call {@link #getData} to obtain the Data frames.
* <p/>
 * Using JavaSound as backend this class is able to handle all sound files supported by JavaSound. Besides the built-in
 * support for .wav, .au and .aiff, it can be extended using plugins (cf. http://www.jsresources.org/ ) to support
 * .ogg, .mp3, .speex and others.
*
* @author Holger Brandl
*/
public class AudioFileDataSource extends BaseDataProcessor {
/** SphinxProperty for the number of bytes to read from the InputStream each time. */
@S4Integer(defaultValue = 3200)
public static final String PROP_BYTES_PER_READ = "bytesPerRead";
/** Default value for PROP_BYTES_PER_READ. */
public static final int PROP_BYTES_PER_READ_DEFAULT = 3200;
protected InputStream dataStream;
protected int sampleRate;
protected int bytesPerRead;
protected int bytesPerValue;
private long totalValuesRead;
protected boolean bigEndian;
protected boolean signedData;
private boolean streamEndReached = false;
private boolean utteranceEndSent = false;
private boolean utteranceStarted = false;
protected List<AudioFileProcessListener> fileListeners = new ArrayList<AudioFileProcessListener>();
private File curAudioFile;
public AudioFileDataSource() {
this(PROP_BYTES_PER_READ_DEFAULT);
}
public AudioFileDataSource(int bytesPerRead) {
super();
initialize();
this.bytesPerRead = bytesPerRead;
}
/*
* (non-Javadoc)
*
* @see edu.cmu.sphinx.util.props.Configurable#newProperties(edu.cmu.sphinx.util.props.PropertySheet)
*/
public void newProperties(PropertySheet ps) throws PropertyException {
super.newProperties(ps);
bytesPerRead = ps.getInt(PROP_BYTES_PER_READ);
initialize();
}
/*
* (non-Javadoc)
*
* @see edu.cmu.sphinx.frontend.DataProcessor#initialize(edu.cmu.sphinx.frontend.CommonConfig)
*/
public void initialize() {
super.initialize();
if (bytesPerRead % 2 == 1) {
bytesPerRead++;
}
}
/**
     * Sets the audio file from which the data-stream will be generated.
*
* @param audioFile The location of the audio file to use
     * @param streamName The name of the InputStream. If <code>null</code>, the complete path of the audio file will be
     *                   used as the stream name.
*/
public void setAudioFile(File audioFile, String streamName) {
try {
setAudioFile(audioFile.toURI().toURL(), streamName);
} catch (MalformedURLException e) {
e.printStackTrace();
}
}
/**
     * Sets the audio file from which the data-stream will be generated.
*
* @param audioFileURL The location of the audio file to use
     * @param streamName   The name of the InputStream. If <code>null</code>, the complete path of the audio file will
     *                     be used as the stream name.
*/
public void setAudioFile(URL audioFileURL, String streamName) {
        // first close the last stream if there is one
if (dataStream != null) {
try {
dataStream.close();
} catch (IOException e) {
e.printStackTrace();
}
dataStream = null;
}
assert audioFileURL != null;
        if (streamName == null)
            streamName = audioFileURL.getPath();
AudioInputStream audioStream = null;
try {
audioStream = AudioSystem.getAudioInputStream(audioFileURL);
} catch (UnsupportedAudioFileException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
curAudioFile = new File(audioFileURL.getFile());
for (AudioFileProcessListener fileListener : fileListeners)
fileListener.audioFileProcStarted(curAudioFile);
setInputStream(audioStream, streamName);
}
/**
* Sets the InputStream from which this StreamDataSource reads.
*
* @param inputStream the InputStream from which audio data comes
* @param streamName the name of the InputStream
*/
public void setInputStream(AudioInputStream inputStream, String streamName) {
dataStream = inputStream;
streamEndReached = false;
utteranceEndSent = false;
utteranceStarted = false;
AudioFormat format = inputStream.getFormat();
sampleRate = (int) format.getSampleRate();
bigEndian = format.isBigEndian();
if (format.getSampleSizeInBits() % 8 != 0)
throw new Error("StreamDataSource: bits per sample must be a multiple of 8.");
bytesPerValue = format.getSampleSizeInBits() / 8;
        // test whether all files in the stream have the same format
AudioFormat.Encoding encoding = format.getEncoding();
if (encoding.equals(AudioFormat.Encoding.PCM_SIGNED))
signedData = true;
else if (encoding.equals(AudioFormat.Encoding.PCM_UNSIGNED))
signedData = false;
else
throw new RuntimeException("used file encoding is not supported");
totalValuesRead = 0;
}
/**
     * Reads and returns the next Data from the InputStream of StreamDataSource, returning null if no data is read and the end
* of file is reached.
*
* @return the next Data or <code>null</code> if none is available
* @throws edu.cmu.sphinx.frontend.DataProcessingException
* if there is a data processing error
*/
public Data getData() throws DataProcessingException {
getTimer().start();
Data output = null;
if (streamEndReached) {
if (!utteranceEndSent) {
// since 'firstSampleNumber' starts at 0, the last
// sample number should be 'totalValuesRead - 1'
output = createDataEndSignal();
utteranceEndSent = true;
}
} else {
if (!utteranceStarted) {
utteranceStarted = true;
output = new DataStartSignal(sampleRate);
} else {
if (dataStream != null) {
output = readNextFrame();
if (output == null) {
if (!utteranceEndSent) {
output = createDataEndSignal();
utteranceEndSent = true;
}
}
}
}
}
getTimer().stop();
return output;
}
private DataEndSignal createDataEndSignal() {
if (!(this instanceof ConcatAudioFileDataSource))
for (AudioFileProcessListener fileListener : fileListeners)
fileListener.audioFileProcFinished(curAudioFile);
return new DataEndSignal(getDuration());
}
/**
* Returns the next Data from the input stream, or null if there is none available
*
* @return a Data or null
     * @throws DataProcessingException if there is an error reading the data
*/
private Data readNextFrame() throws DataProcessingException {
// read one frame's worth of bytes
int read;
int totalRead = 0;
final int bytesToRead = bytesPerRead;
byte[] samplesBuffer = new byte[bytesPerRead];
long collectTime = System.currentTimeMillis();
long firstSample = totalValuesRead;
try {
do {
read = dataStream.read(samplesBuffer, totalRead, bytesToRead
- totalRead);
if (read > 0) {
totalRead += read;
}
} while (read != -1 && totalRead < bytesToRead);
if (totalRead <= 0) {
closeDataStream();
return null;
}
// shrink incomplete frames
totalValuesRead += (totalRead / bytesPerValue);
if (totalRead < bytesToRead) {
totalRead = (totalRead % 2 == 0)
? totalRead + 2
: totalRead + 3;
byte[] shrinkedBuffer = new byte[totalRead];
System
.arraycopy(samplesBuffer, 0, shrinkedBuffer, 0,
totalRead);
samplesBuffer = shrinkedBuffer;
closeDataStream();
}
} catch (IOException ioe) {
ioe.printStackTrace();
throw new DataProcessingException("Error reading data");
}
// turn it into an Data object
double[] doubleData;
if (bigEndian) {
doubleData = DataUtil.bytesToValues(samplesBuffer, 0, totalRead, bytesPerValue, signedData);
} else {
doubleData = DataUtil.littleEndianBytesToValues(samplesBuffer, 0, totalRead, bytesPerValue, signedData);
}
return new DoubleData(doubleData, sampleRate, collectTime, firstSample);
}
private void closeDataStream() throws IOException {
streamEndReached = true;
if (dataStream != null) {
dataStream.close();
}
}
/**
* Returns the duration of the current data stream in milliseconds.
*
* @return the duration of the current data stream in milliseconds
*/
private long getDuration() {
return (long) (((double) totalValuesRead / (double) sampleRate) * 1000.0);
}
public int getSampleRate() {
return sampleRate;
}
public boolean isBigEndian() {
return bigEndian;
}
/** Adds a new listener for new file events. */
public void addNewFileListener(AudioFileProcessListener l) {
if (l == null)
return;
fileListeners.add(l);
}
/** Removes a listener for new file events. */
public void removeNewFileListener(AudioFileProcessListener l) {
if (l == null)
return;
fileListeners.remove(l);
}
}
| made list of AudioFileListeners customizable via config-api
git-svn-id: a8b04003a33e1d3e001b9d20391fa392a9f62d91@8009 94700074-3cef-4d97-a70e-9c8c206c02f5
| src/sphinx4/edu/cmu/sphinx/frontend/util/AudioFileDataSource.java | made list of AudioFileListeners customizable via config-api | <ide><path>rc/sphinx4/edu/cmu/sphinx/frontend/util/AudioFileDataSource.java
<ide> private boolean utteranceEndSent = false;
<ide> private boolean utteranceStarted = false;
<ide>
<add> @S4ComponentList(type = Configurable.class)
<add> public static final String AUDIO_FILE_LISTENERS = "audioFileListners";
<ide> protected List<AudioFileProcessListener> fileListeners = new ArrayList<AudioFileProcessListener>();
<add>
<ide> private File curAudioFile;
<ide>
<ide>
<ide> public void newProperties(PropertySheet ps) throws PropertyException {
<ide> super.newProperties(ps);
<ide> bytesPerRead = ps.getInt(PROP_BYTES_PER_READ);
<add>
<add> // attach all pool-listeners
<add> List<? extends Configurable> list = ps.getComponentList(AUDIO_FILE_LISTENERS);
<add> for (Configurable configurable : list) {
<add> assert configurable instanceof AudioFileProcessListener;
<add> addNewFileListener((AudioFileProcessListener) configurable);
<add> }
<ide>
<ide> initialize();
<ide> } |
|
Java | apache-2.0 | e44d119f3a05a5eaef0929144b9515c2313cfd57 | 0 | rvais/xmvn,rvais/xmvn,rvais/xmvn,mizdebsk/xmvn,mizdebsk/xmvn,mizdebsk/xmvn,fedora-java/xmvn,mizdebsk/xmvn,rvais/xmvn,fedora-java/xmvn,fedora-java/xmvn | /*-
* Copyright (c) 2012-2014 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fedoraproject.xmvn.mojo;
import static org.fedoraproject.xmvn.mojo.Utils.aetherArtifact;
import static org.fedoraproject.xmvn.mojo.Utils.saveEffectivePom;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import javax.inject.Inject;
import javax.inject.Named;
import org.apache.maven.model.Dependency;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.project.MavenProject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.fedoraproject.xmvn.artifact.Artifact;
import org.fedoraproject.xmvn.artifact.DefaultArtifact;
import org.fedoraproject.xmvn.deployer.Deployer;
import org.fedoraproject.xmvn.deployer.DeploymentRequest;
import org.fedoraproject.xmvn.deployer.DeploymentResult;
import org.fedoraproject.xmvn.utils.ArtifactUtils;
/**
* @author Mikolaj Izdebski
*/
@Mojo( name = "install", aggregator = true, requiresDependencyResolution = ResolutionScope.NONE )
@Named
public class InstallMojo
extends AbstractMojo
{
private static final Set<String> TYCHO_PACKAGING_TYPES = new LinkedHashSet<>();
private static final Set<String> TYCHO_P2_CLASSIFIERS = new LinkedHashSet<>();
static
{
TYCHO_PACKAGING_TYPES.add( "eclipse-plugin" );
TYCHO_PACKAGING_TYPES.add( "eclipse-test-plugin" );
TYCHO_PACKAGING_TYPES.add( "eclipse-feature" );
TYCHO_PACKAGING_TYPES.add( "eclipse-update-site" );
TYCHO_PACKAGING_TYPES.add( "eclipse-application" );
TYCHO_PACKAGING_TYPES.add( "eclipse-repository" );
for ( String packaging : TYCHO_PACKAGING_TYPES )
TYCHO_P2_CLASSIFIERS.add( "p2." + packaging );
}
private static boolean isTychoInjectedDependency( Dependency dependency )
{
return TYCHO_P2_CLASSIFIERS.contains( dependency.getGroupId() );
}
private static boolean isTychoProject( MavenProject project )
{
return TYCHO_PACKAGING_TYPES.contains( project.getPackaging() );
}
private final Logger logger = LoggerFactory.getLogger( InstallMojo.class );
@Parameter( defaultValue = "${reactorProjects}", readonly = true, required = true )
private List<MavenProject> reactorProjects;
private final Deployer deployer;
@Inject
public InstallMojo( Deployer deployer )
{
this.deployer = deployer;
}
/**
     * Dump project dependencies with "system" scope and fail if any such dependencies are found.
*/
private void handleSystemDependencies()
throws MojoFailureException
{
boolean systemDepsFound = false;
for ( MavenProject project : reactorProjects )
{
Set<Artifact> systemDeps = new LinkedHashSet<>();
for ( Dependency dependency : project.getModel().getDependencies() )
{
// Ignore dependencies injected by Tycho
if ( isTychoProject( project ) && isTychoInjectedDependency( dependency ) )
continue;
if ( dependency.getScope() != null && dependency.getScope().equals( "system" ) )
{
systemDeps.add( new DefaultArtifact( dependency.getGroupId(), dependency.getArtifactId(),
dependency.getClassifier(), dependency.getType(),
dependency.getVersion() ) );
}
}
if ( !systemDeps.isEmpty() )
{
systemDepsFound = true;
logger.error( "Reactor project {} has system-scoped dependencies: {}",
aetherArtifact( project.getArtifact() ),
ArtifactUtils.collectionToString( systemDeps, true ) );
}
}
if ( systemDepsFound )
{
throw new MojoFailureException( "Some reactor artifacts have dependencies with scope \"system\"."
+ " Such dependencies are not supported by XMvn installer."
+ " You should either remove any dependencies with scope \"system\""
+ " before the build or not run XMvn instaler." );
}
}
private void deployArtifact( Artifact artifact )
throws MojoExecutionException
{
DeploymentRequest request = new DeploymentRequest();
request.setArtifact( artifact );
DeploymentResult result = deployer.deploy( request );
if ( result.getException() != null )
throw new MojoExecutionException( "Failed to deploy artifact " + artifact, result.getException() );
}
@Override
public void execute()
throws MojoExecutionException, MojoFailureException
{
handleSystemDependencies();
try
{
for ( MavenProject project : reactorProjects )
{
Artifact mainArtifact = aetherArtifact( project.getArtifact() );
File mainArtifactFile = project.getArtifact().getFile();
Path mainArtifactPath = mainArtifactFile != null ? mainArtifactFile.toPath() : null;
mainArtifact = mainArtifact.setPath( mainArtifactPath );
logger.debug( "Installing main artifact {}", mainArtifact );
logger.debug( "Artifact file is {}", mainArtifactPath );
if ( mainArtifactPath != null )
deployArtifact( mainArtifact );
if ( mainArtifactPath != null && !isTychoProject( project ) )
{
Artifact effectivePomArtifact =
new DefaultArtifact( mainArtifact.getGroupId(), mainArtifact.getArtifactId(), "pom",
mainArtifact.getClassifier(), mainArtifact.getVersion() );
effectivePomArtifact = effectivePomArtifact.setStereotype( "effective" );
Path effectivePom = saveEffectivePom( project.getModel() );
logger.debug( "Effective POM path: {}", effectivePom );
effectivePomArtifact = effectivePomArtifact.setPath( effectivePom );
deployArtifact( effectivePomArtifact );
}
Artifact rawPomArtifact =
new DefaultArtifact( mainArtifact.getGroupId(), mainArtifact.getArtifactId(), "pom",
mainArtifact.getClassifier(), mainArtifact.getVersion() );
rawPomArtifact = rawPomArtifact.setStereotype( "raw" );
File rawPomFile = project.getFile();
Path rawPomPath = rawPomFile != null ? rawPomFile.toPath() : null;
logger.debug( "Raw POM path: {}", rawPomPath );
rawPomArtifact = rawPomArtifact.setPath( rawPomPath );
deployArtifact( rawPomArtifact );
for ( org.apache.maven.artifact.Artifact mavenArtifact : project.getAttachedArtifacts() )
{
Artifact attachedArtifact = aetherArtifact( mavenArtifact );
File attachedArtifactFile = mavenArtifact.getFile();
Path attachedArtifactPath = attachedArtifactFile != null ? attachedArtifactFile.toPath() : null;
attachedArtifact = attachedArtifact.setPath( attachedArtifactPath );
logger.debug( "Installing attached artifact {}", attachedArtifact );
deployArtifact( attachedArtifact );
}
}
}
catch ( IOException e )
{
throw new MojoExecutionException( "Failed to install project", e );
}
}
}
| xmvn-mojo/src/main/java/org/fedoraproject/xmvn/mojo/InstallMojo.java | /*-
* Copyright (c) 2012-2014 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fedoraproject.xmvn.mojo;
import static org.fedoraproject.xmvn.mojo.Utils.aetherArtifact;
import static org.fedoraproject.xmvn.mojo.Utils.saveEffectivePom;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import javax.inject.Inject;
import javax.inject.Named;
import org.apache.maven.model.Dependency;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.project.MavenProject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.fedoraproject.xmvn.artifact.Artifact;
import org.fedoraproject.xmvn.artifact.DefaultArtifact;
import org.fedoraproject.xmvn.deployer.Deployer;
import org.fedoraproject.xmvn.deployer.DeploymentRequest;
import org.fedoraproject.xmvn.deployer.DeploymentResult;
import org.fedoraproject.xmvn.utils.ArtifactUtils;
/**
* @author Mikolaj Izdebski
*/
@Mojo( name = "install", aggregator = true, requiresDependencyResolution = ResolutionScope.NONE )
@Named
public class InstallMojo
extends AbstractMojo
{
private static final Set<String> TYCHO_PACKAGING_TYPES = new LinkedHashSet<>();
private static final Set<String> TYCHO_P2_CLASSIFIERS = new LinkedHashSet<>();
static
{
TYCHO_PACKAGING_TYPES.add( "eclipse-plugin" );
TYCHO_PACKAGING_TYPES.add( "eclipse-test-plugin" );
TYCHO_PACKAGING_TYPES.add( "eclipse-feature" );
TYCHO_PACKAGING_TYPES.add( "eclipse-update-site" );
TYCHO_PACKAGING_TYPES.add( "eclipse-application" );
TYCHO_PACKAGING_TYPES.add( "eclipse-repository" );
for ( String packaging : TYCHO_PACKAGING_TYPES )
TYCHO_P2_CLASSIFIERS.add( "p2." + packaging );
}
private static boolean isTychoInjectedDependency( Dependency dependency )
{
return TYCHO_P2_CLASSIFIERS.contains( dependency.getGroupId() );
}
private static boolean isTychoProject( MavenProject project )
{
return TYCHO_PACKAGING_TYPES.contains( project.getPackaging() );
}
private final Logger logger = LoggerFactory.getLogger( InstallMojo.class );
@Parameter( defaultValue = "${project}", readonly = true, required = true )
private MavenProject rootProject;
@Parameter( defaultValue = "${reactorProjects}", readonly = true, required = true )
private List<MavenProject> reactorProjects;
private final Deployer deployer;
@Inject
public InstallMojo( Deployer deployer )
{
this.deployer = deployer;
}
/**
     * Dump project dependencies with "system" scope and fail if any such dependencies are found.
*/
private void handleSystemDependencies()
throws MojoFailureException
{
boolean systemDepsFound = false;
for ( MavenProject project : reactorProjects )
{
Set<Artifact> systemDeps = new LinkedHashSet<>();
for ( Dependency dependency : project.getModel().getDependencies() )
{
// Ignore dependencies injected by Tycho
if ( isTychoProject( project ) && isTychoInjectedDependency( dependency ) )
continue;
if ( dependency.getScope() != null && dependency.getScope().equals( "system" ) )
{
systemDeps.add( new DefaultArtifact( dependency.getGroupId(), dependency.getArtifactId(),
dependency.getClassifier(), dependency.getType(),
dependency.getVersion() ) );
}
}
if ( !systemDeps.isEmpty() )
{
systemDepsFound = true;
logger.error( "Reactor project {} has system-scoped dependencies: {}",
aetherArtifact( project.getArtifact() ),
ArtifactUtils.collectionToString( systemDeps, true ) );
}
}
if ( systemDepsFound )
{
throw new MojoFailureException( "Some reactor artifacts have dependencies with scope \"system\"."
+ " Such dependencies are not supported by XMvn installer."
+ " You should either remove any dependencies with scope \"system\""
+ " before the build or not run XMvn instaler." );
}
}
private void deployArtifact( Artifact artifact )
throws MojoExecutionException
{
DeploymentRequest request = new DeploymentRequest();
request.setArtifact( artifact );
DeploymentResult result = deployer.deploy( request );
if ( result.getException() != null )
throw new MojoExecutionException( "Failed to deploy artifact " + artifact, result.getException() );
}
@Override
public void execute()
throws MojoExecutionException, MojoFailureException
{
handleSystemDependencies();
try
{
for ( MavenProject project : reactorProjects )
{
Artifact mainArtifact = aetherArtifact( project.getArtifact() );
File mainArtifactFile = project.getArtifact().getFile();
Path mainArtifactPath = mainArtifactFile != null ? mainArtifactFile.toPath() : null;
mainArtifact = mainArtifact.setPath( mainArtifactPath );
logger.debug( "Installing main artifact {}", mainArtifact );
logger.debug( "Artifact file is {}", mainArtifactPath );
if ( mainArtifactPath != null )
deployArtifact( mainArtifact );
if ( mainArtifactPath != null && !isTychoProject( project ) )
{
Artifact effectivePomArtifact =
new DefaultArtifact( mainArtifact.getGroupId(), mainArtifact.getArtifactId(), "pom",
mainArtifact.getClassifier(), mainArtifact.getVersion() );
effectivePomArtifact = effectivePomArtifact.setStereotype( "effective" );
Path effectivePom = saveEffectivePom( project.getModel() );
logger.debug( "Effective POM path: {}", effectivePom );
effectivePomArtifact = effectivePomArtifact.setPath( effectivePom );
deployArtifact( effectivePomArtifact );
}
Artifact rawPomArtifact =
new DefaultArtifact( mainArtifact.getGroupId(), mainArtifact.getArtifactId(), "pom",
mainArtifact.getClassifier(), mainArtifact.getVersion() );
rawPomArtifact = rawPomArtifact.setStereotype( "raw" );
File rawPomFile = project.getFile();
Path rawPomPath = rawPomFile != null ? rawPomFile.toPath() : null;
logger.debug( "Raw POM path: {}", rawPomPath );
rawPomArtifact = rawPomArtifact.setPath( rawPomPath );
deployArtifact( rawPomArtifact );
for ( org.apache.maven.artifact.Artifact mavenArtifact : project.getAttachedArtifacts() )
{
Artifact attachedArtifact = aetherArtifact( mavenArtifact );
File attachedArtifactFile = mavenArtifact.getFile();
Path attachedArtifactPath = attachedArtifactFile != null ? attachedArtifactFile.toPath() : null;
attachedArtifact = attachedArtifact.setPath( attachedArtifactPath );
logger.debug( "Installing attached artifact {}", attachedArtifact );
deployArtifact( attachedArtifact );
}
}
}
catch ( IOException e )
{
throw new MojoExecutionException( "Failed to install project", e );
}
}
}
| Remove unused rootProject MOJO parameter
| xmvn-mojo/src/main/java/org/fedoraproject/xmvn/mojo/InstallMojo.java | Remove unused rootProject MOJO parameter | <ide><path>mvn-mojo/src/main/java/org/fedoraproject/xmvn/mojo/InstallMojo.java
<ide>
<ide> private final Logger logger = LoggerFactory.getLogger( InstallMojo.class );
<ide>
<del> @Parameter( defaultValue = "${project}", readonly = true, required = true )
<del> private MavenProject rootProject;
<del>
<ide> @Parameter( defaultValue = "${reactorProjects}", readonly = true, required = true )
<ide> private List<MavenProject> reactorProjects;
<ide> |
|
JavaScript | mit | 79aea9db581ddb192c5a0ae0906799e01c001a67 | 0 | Raynos/append-only | var Scuttlebutt = require("scuttlebutt")
, filter = Scuttlebutt.filter
, inherits = require("util").inherits
inherits(AppendOnly, Scuttlebutt)
var proto = AppendOnly.prototype
proto.push = push
proto.remove = remove
proto.applyUpdate = applyUpdate
proto.history = history
proto.toJSON = proto.createArray = createArray
module.exports = AppendOnly
function AppendOnly(options) {
if (! (this instanceof AppendOnly)) {
return new AppendOnly(options)
}
Scuttlebutt.call(this, options)
this._store = []
this._hash = {}
this.on('_remove', function removeUpdateFromStore (update) {
var i = this._store.indexOf(update)
if(~i) this._store.splice(i, 1)
})
}
function push(item) {
this.localUpdate({ push: item })
}
function remove(id) {
this.localUpdate({ remove: id.__id ? id.__id : id })
}
function toId (update) {
var ts = update[1]
, source = update[2]
return source + ':' + ts
}
function applyUpdate(update) {
var value = update[0]
this._store.push(update)
if (value.push) {
var item = value.push
, id = toId(update)
Object.defineProperty(item, "__id", {
value: id
, configurable: true
})
this._hash[id] = update
this.emit("item", item)
} else if (value.remove) {
var id = value.remove
, _update = this._hash[id]
;delete this._hash[id]
this.emit("_remove", _update)
this.emit("remove", _update[0].push)
}
return true
}
function history(sources) {
return this._store.filter(function (update) {
return filter(update, sources)
})
}
function createArray() {
var hash = this._hash
return Object.keys(hash).map(findKey, hash)
}
function findKey(key) {
return this[key]
}
| index.js | var Scuttlebutt = require("scuttlebutt")
, filter = Scuttlebutt.filter
, inherits = require("util").inherits
inherits(AppendOnly, Scuttlebutt)
var proto = AppendOnly.prototype
proto.push = push
proto.remove = remove
proto.applyUpdate = applyUpdate
proto.history = history
proto.toJSON = proto.createArray = createArray
module.exports = AppendOnly
function AppendOnly(options) {
if (! (this instanceof AppendOnly)) {
return new AppendOnly(options)
}
Scuttlebutt.call(this, options)
this._store = []
this._hash = {}
}
function push(item) {
this.localUpdate({ push: item })
}
function remove(id) {
this.localUpdate({ remove: id })
}
function applyUpdate(update) {
var value = update[0]
, ts = update[1]
, source = update[2]
// console.log("applyUpdate", update)
this._store.push(update)
if (value.push) {
var item = value.push
, id = source + ":" + ts
Object.defineProperty(item, "__id", {
value: id
, configurable: true
})
this._hash[id] = item
this.emit("item", item)
} else if (value.remove) {
var id = value.remove
, item = this._hash[id]
;delete this._hash[id]
this.emit("remove", item)
}
return true
}
function history(sources) {
return this._store.filter(function (update) {
return filter(update, sources)
})
}
function createArray() {
var hash = this._hash
return Object.keys(hash).map(findKey, hash)
}
function findKey(key) {
return this[key]
}
| emit remove event, so that db knows it can remove an old push.
also, accept remove({__id: ...}) and remove(__id), which is how I thought it worked.
| index.js | emit remove event, so that db knows it can remove an old push. | <ide><path>ndex.js
<ide>
<ide> this._store = []
<ide> this._hash = {}
<add>
<add> this.on('_remove', function removeUpdateFromStore (update) {
<add> var i = this._store.indexOf(update)
<add> if(~i) this._store.splice(i, 1)
<add> })
<ide> }
<ide>
<ide> function push(item) {
<ide> }
<ide>
<ide> function remove(id) {
<del> this.localUpdate({ remove: id })
<add> this.localUpdate({ remove: id.__id ? id.__id : id })
<add>}
<add>
<add>function toId (update) {
<add> var ts = update[1]
<add> , source = update[2]
<add> return source + ':' + ts
<ide> }
<ide>
<ide> function applyUpdate(update) {
<ide> var value = update[0]
<del> , ts = update[1]
<del> , source = update[2]
<del>
<del> // console.log("applyUpdate", update)
<ide>
<ide> this._store.push(update)
<ide>
<ide> if (value.push) {
<ide> var item = value.push
<del> , id = source + ":" + ts
<add> , id = toId(update)
<ide>
<ide> Object.defineProperty(item, "__id", {
<ide> value: id
<ide> , configurable: true
<ide> })
<del> this._hash[id] = item
<add> this._hash[id] = update
<ide> this.emit("item", item)
<ide> } else if (value.remove) {
<ide> var id = value.remove
<del> , item = this._hash[id]
<add> , _update = this._hash[id]
<ide>
<ide> ;delete this._hash[id]
<del> this.emit("remove", item)
<add>
<add> this.emit("_remove", _update)
<add> this.emit("remove", _update[0].push)
<ide> }
<ide> return true
<ide> } |
|
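A short aside on the append-only record above, since its message packs two behavioural changes: `remove()` now accepts either the pushed item itself (resolved through its hidden `__id` property) or the raw "source:timestamp" id string, and the public "remove" event now carries the originally pushed item, while the internal "_remove" event prunes the spent update from `_store`. A minimal usage sketch, assuming the module is installed and required as `append-only` (the variable names are illustrative, not taken from the record):

var AppendOnly = require('append-only') // assumed package name

var list = new AppendOnly()
var lastItem = null

list.on('item', function (item) {
  // every pushed item is tagged with a hidden "source:timestamp" __id
  lastItem = item
})

list.on('remove', function (item) {
  // after this commit the public event carries the original pushed item
  console.log('removed', item.name)
})

list.push({ name: 'first' })

// remove() accepts the item itself or its raw id string:
list.remove(lastItem) // equivalent to list.remove(lastItem.__id)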
JavaScript | apache-2.0 | 5dc12f9a6de58ae0db4c24fb3dc8723ae12131e5 | 0 | caremerge/pdf.js,nawawi/pdf.js,nawawi/pdf.js,timvandermeij/pdf.js,showpad/mozilla-pdf.js,Shoobx/pdf.js,mainegreen/pdf.js,Snuffleupagus/pdf.js,timvandermeij/pdf.js,xavier114fch/pdf.js,Shoobx/pdf.js,showpad/mozilla-pdf.js,showpad/mozilla-pdf.js,Snuffleupagus/pdf.js,macroplant/pdf.js,mainegreen/pdf.js,mukulmishra18/pdf.js,mozilla/pdf.js,caremerge/pdf.js,timvandermeij/pdf.js,mozilla/pdf.js,Shoobx/pdf.js,macroplant/pdf.js,nawawi/pdf.js,mozilla/pdf.js,mukulmishra18/pdf.js,xavier114fch/pdf.js,macroplant/pdf.js,Snuffleupagus/pdf.js | /* Copyright 2012 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { createPromiseCapability } from 'pdfjs-lib';
import { getCharacterType } from './pdf_find_utils';
import { getGlobalEventBus } from './dom_events';
const FindState = {
FOUND: 0,
NOT_FOUND: 1,
WRAPPED: 2,
PENDING: 3,
};
const FIND_TIMEOUT = 250; // ms
const CHARACTERS_TO_NORMALIZE = {
'\u2018': '\'', // Left single quotation mark
'\u2019': '\'', // Right single quotation mark
'\u201A': '\'', // Single low-9 quotation mark
'\u201B': '\'', // Single high-reversed-9 quotation mark
'\u201C': '"', // Left double quotation mark
'\u201D': '"', // Right double quotation mark
'\u201E': '"', // Double low-9 quotation mark
'\u201F': '"', // Double high-reversed-9 quotation mark
'\u00BC': '1/4', // Vulgar fraction one quarter
'\u00BD': '1/2', // Vulgar fraction one half
'\u00BE': '3/4', // Vulgar fraction three quarters
};
let normalizationRegex = null;
function normalize(text) {
if (!normalizationRegex) {
// Compile the regular expression for text normalization once.
const replace = Object.keys(CHARACTERS_TO_NORMALIZE).join('');
normalizationRegex = new RegExp(`[${replace}]`, 'g');
}
return text.replace(normalizationRegex, function(ch) {
return CHARACTERS_TO_NORMALIZE[ch];
});
}
/**
* @typedef {Object} PDFFindControllerOptions
* @property {IPDFLinkService} linkService - The navigation/linking service.
* @property {EventBus} eventBus - The application event bus.
*/
/**
* Provides search functionality to find a given string in a PDF document.
*/
class PDFFindController {
/**
* @param {PDFFindControllerOptions} options
*/
constructor({ linkService, eventBus = getGlobalEventBus(), }) {
this._linkService = linkService;
this._eventBus = eventBus;
this._reset();
eventBus.on('findbarclose', this._onFindBarClose.bind(this));
}
get highlightMatches() {
return this._highlightMatches;
}
get pageMatches() {
return this._pageMatches;
}
get pageMatchesLength() {
return this._pageMatchesLength;
}
get selected() {
return this._selected;
}
get state() {
return this._state;
}
/**
* Set a reference to the PDF document in order to search it.
* Note that searching is not possible if this method is not called.
*
* @param {PDFDocumentProxy} pdfDocument - The PDF document to search.
*/
setDocument(pdfDocument) {
if (this._pdfDocument) {
this._reset();
}
if (!pdfDocument) {
return;
}
this._pdfDocument = pdfDocument;
this._firstPageCapability.resolve();
}
executeCommand(cmd, state) {
const pdfDocument = this._pdfDocument;
if (this._state === null || cmd !== 'findagain') {
this._dirtyMatch = true;
}
this._state = state;
this._updateUIState(FindState.PENDING);
this._firstPageCapability.promise.then(() => {
if (!this._pdfDocument ||
(pdfDocument && this._pdfDocument !== pdfDocument)) {
// If the document was closed before searching began, or if the search
// operation was relevant for a previously opened document, do nothing.
return;
}
this._extractText();
if (this._findTimeout) {
clearTimeout(this._findTimeout);
this._findTimeout = null;
}
if (cmd === 'find') {
// Trigger the find action with a small delay to avoid starting the
// search when the user is still typing (saving resources).
this._findTimeout = setTimeout(() => {
this._nextMatch();
this._findTimeout = null;
}, FIND_TIMEOUT);
} else {
this._nextMatch();
}
});
}
_reset() {
this._highlightMatches = false;
this._pdfDocument = null;
this._pageMatches = [];
this._pageMatchesLength = null;
this._state = null;
this._selected = { // Currently selected match.
pageIdx: -1,
matchIdx: -1,
};
this._offset = { // Where the find algorithm currently is in the document.
pageIdx: null,
matchIdx: null,
};
this._extractTextPromises = [];
this._pageContents = []; // Stores the normalized text for each page.
this._matchesCountTotal = 0;
this._pagesToSearch = null;
this._pendingFindMatches = Object.create(null);
this._resumePageIdx = null;
this._dirtyMatch = false;
clearTimeout(this._findTimeout);
this._findTimeout = null;
this._firstPageCapability = createPromiseCapability();
}
/**
* @return {string} The (current) normalized search query.
*/
get _query() {
if (this._state.query !== this._rawQuery) {
this._rawQuery = this._state.query;
this._normalizedQuery = normalize(this._state.query);
}
return this._normalizedQuery;
}
/**
* Helper for multi-term search that fills the `matchesWithLength` array
* and handles cases where one search term includes another search term (for
* example, "tamed tame" or "this is"). It looks for intersecting terms in
* the `matches` and keeps elements with a longer match length.
*/
_prepareMatches(matchesWithLength, matches, matchesLength) {
function isSubTerm(matchesWithLength, currentIndex) {
const currentElem = matchesWithLength[currentIndex];
const nextElem = matchesWithLength[currentIndex + 1];
// Check for cases like "TAMEd TAME".
if (currentIndex < matchesWithLength.length - 1 &&
currentElem.match === nextElem.match) {
currentElem.skipped = true;
return true;
}
// Check for cases like "thIS IS".
for (let i = currentIndex - 1; i >= 0; i--) {
const prevElem = matchesWithLength[i];
if (prevElem.skipped) {
continue;
}
if (prevElem.match + prevElem.matchLength < currentElem.match) {
break;
}
if (prevElem.match + prevElem.matchLength >=
currentElem.match + currentElem.matchLength) {
currentElem.skipped = true;
return true;
}
}
return false;
}
// Sort the array of `{ match: <match>, matchLength: <matchLength> }`
// objects on increasing index first and on the length otherwise.
matchesWithLength.sort(function(a, b) {
return a.match === b.match ? a.matchLength - b.matchLength :
a.match - b.match;
});
for (let i = 0, len = matchesWithLength.length; i < len; i++) {
if (isSubTerm(matchesWithLength, i)) {
continue;
}
matches.push(matchesWithLength[i].match);
matchesLength.push(matchesWithLength[i].matchLength);
}
}
/**
* Determine if the search query constitutes a "whole word", by comparing the
* first/last character type with the preceding/following character type.
*/
_isEntireWord(content, startIdx, length) {
if (startIdx > 0) {
const first = content.charCodeAt(startIdx);
const limit = content.charCodeAt(startIdx - 1);
if (getCharacterType(first) === getCharacterType(limit)) {
return false;
}
}
const endIdx = (startIdx + length - 1);
if (endIdx < (content.length - 1)) {
const last = content.charCodeAt(endIdx);
const limit = content.charCodeAt(endIdx + 1);
if (getCharacterType(last) === getCharacterType(limit)) {
return false;
}
}
return true;
}
_calculatePhraseMatch(query, pageIndex, pageContent, entireWord) {
const matches = [];
const queryLen = query.length;
let matchIdx = -queryLen;
while (true) {
matchIdx = pageContent.indexOf(query, matchIdx + queryLen);
if (matchIdx === -1) {
break;
}
if (entireWord && !this._isEntireWord(pageContent, matchIdx, queryLen)) {
continue;
}
matches.push(matchIdx);
}
this._pageMatches[pageIndex] = matches;
}
_calculateWordMatch(query, pageIndex, pageContent, entireWord) {
const matchesWithLength = [];
// Divide the query into pieces and search for text in each piece.
const queryArray = query.match(/\S+/g);
for (let i = 0, len = queryArray.length; i < len; i++) {
const subquery = queryArray[i];
const subqueryLen = subquery.length;
let matchIdx = -subqueryLen;
while (true) {
matchIdx = pageContent.indexOf(subquery, matchIdx + subqueryLen);
if (matchIdx === -1) {
break;
}
if (entireWord &&
!this._isEntireWord(pageContent, matchIdx, subqueryLen)) {
continue;
}
        // Store the match length as well, since subqueries can differ in length.
matchesWithLength.push({
match: matchIdx,
matchLength: subqueryLen,
skipped: false,
});
}
}
// Prepare arrays for storing the matches.
if (!this._pageMatchesLength) {
this._pageMatchesLength = [];
}
this._pageMatchesLength[pageIndex] = [];
this._pageMatches[pageIndex] = [];
// Sort `matchesWithLength`, remove intersecting terms and put the result
// into the two arrays.
this._prepareMatches(matchesWithLength, this._pageMatches[pageIndex],
this._pageMatchesLength[pageIndex]);
}
_calculateMatch(pageIndex) {
let pageContent = this._pageContents[pageIndex];
let query = this._query;
const { caseSensitive, entireWord, phraseSearch, } = this._state;
if (query.length === 0) {
// Do nothing: the matches should be wiped out already.
return;
}
if (!caseSensitive) {
pageContent = pageContent.toLowerCase();
query = query.toLowerCase();
}
if (phraseSearch) {
this._calculatePhraseMatch(query, pageIndex, pageContent, entireWord);
} else {
this._calculateWordMatch(query, pageIndex, pageContent, entireWord);
}
this._updatePage(pageIndex);
if (this._resumePageIdx === pageIndex) {
this._resumePageIdx = null;
this._nextPageMatch();
}
// Update the match count.
const pageMatchesCount = this._pageMatches[pageIndex].length;
if (pageMatchesCount > 0) {
this._matchesCountTotal += pageMatchesCount;
this._updateUIResultsCount();
}
}
_extractText() {
// Perform text extraction once if this method is called multiple times.
if (this._extractTextPromises.length > 0) {
return;
}
let promise = Promise.resolve();
for (let i = 0, ii = this._linkService.pagesCount; i < ii; i++) {
const extractTextCapability = createPromiseCapability();
this._extractTextPromises[i] = extractTextCapability.promise;
promise = promise.then(() => {
return this._pdfDocument.getPage(i + 1).then((pdfPage) => {
return pdfPage.getTextContent({
normalizeWhitespace: true,
});
}).then((textContent) => {
const textItems = textContent.items;
const strBuf = [];
for (let j = 0, jj = textItems.length; j < jj; j++) {
strBuf.push(textItems[j].str);
}
// Store the normalized page content (text items) as one string.
this._pageContents[i] = normalize(strBuf.join(''));
extractTextCapability.resolve(i);
}, (reason) => {
console.error(`Unable to get text content for page ${i + 1}`, reason);
// Page error -- assuming no text content.
this._pageContents[i] = '';
extractTextCapability.resolve(i);
});
});
}
}
_updatePage(index) {
if (this._selected.pageIdx === index) {
// If the page is selected, scroll the page into view, which triggers
// rendering the page, which adds the text layer. Once the text layer
// is built, it will scroll to the selected match.
this._linkService.page = index + 1;
}
this._eventBus.dispatch('updatetextlayermatches', {
source: this,
pageIndex: index,
});
}
_nextMatch() {
const previous = this._state.findPrevious;
const currentPageIndex = this._linkService.page - 1;
const numPages = this._linkService.pagesCount;
this._highlightMatches = true;
if (this._dirtyMatch) {
// Need to recalculate the matches, reset everything.
this._dirtyMatch = false;
this._selected.pageIdx = this._selected.matchIdx = -1;
this._offset.pageIdx = currentPageIndex;
this._offset.matchIdx = null;
this._resumePageIdx = null;
this._pageMatches.length = 0;
this._pageMatchesLength = null;
this._matchesCountTotal = 0;
for (let i = 0; i < numPages; i++) {
// Wipe out any previously highlighted matches.
this._updatePage(i);
// Start finding the matches as soon as the text is extracted.
if (!(i in this._pendingFindMatches)) {
this._pendingFindMatches[i] = true;
this._extractTextPromises[i].then((pageIdx) => {
delete this._pendingFindMatches[pageIdx];
this._calculateMatch(pageIdx);
});
}
}
}
// If there's no query there's no point in searching.
if (this._query === '') {
this._updateUIState(FindState.FOUND);
return;
}
// If we're waiting on a page, we return since we can't do anything else.
if (this._resumePageIdx) {
return;
}
const offset = this._offset;
// Keep track of how many pages we should maximally iterate through.
this._pagesToSearch = numPages;
// If there's already a `matchIdx` that means we are iterating through a
// page's matches.
if (offset.matchIdx !== null) {
const numPageMatches = this._pageMatches[offset.pageIdx].length;
if ((!previous && offset.matchIdx + 1 < numPageMatches) ||
(previous && offset.matchIdx > 0)) {
        // The simple case; we just have to advance the matchIdx to select
// the next match on the page.
offset.matchIdx = (previous ? offset.matchIdx - 1 :
offset.matchIdx + 1);
this._updateMatch(/* found = */ true);
return;
}
// We went beyond the current page's matches, so we advance to
// the next page.
this._advanceOffsetPage(previous);
}
// Start searching through the page.
this._nextPageMatch();
}
_matchesReady(matches) {
const offset = this._offset;
const numMatches = matches.length;
const previous = this._state.findPrevious;
if (numMatches) {
// There were matches for the page, so initialize `matchIdx`.
offset.matchIdx = (previous ? numMatches - 1 : 0);
this._updateMatch(/* found = */ true);
return true;
}
// No matches, so attempt to search the next page.
this._advanceOffsetPage(previous);
if (offset.wrapped) {
offset.matchIdx = null;
if (this._pagesToSearch < 0) {
// No point in wrapping again, there were no matches.
this._updateMatch(/* found = */ false);
// While matches were not found, searching for a page
// with matches should nevertheless halt.
return true;
}
}
// Matches were not found (and searching is not done).
return false;
}
_nextPageMatch() {
if (this._resumePageIdx !== null) {
console.error('There can only be one pending page.');
}
let matches = null;
do {
const pageIdx = this._offset.pageIdx;
matches = this._pageMatches[pageIdx];
if (!matches) {
// The matches don't exist yet for processing by `_matchesReady`,
// so set a resume point for when they do exist.
this._resumePageIdx = pageIdx;
break;
}
} while (!this._matchesReady(matches));
}
_advanceOffsetPage(previous) {
const offset = this._offset;
const numPages = this._linkService.pagesCount;
offset.pageIdx = (previous ? offset.pageIdx - 1 : offset.pageIdx + 1);
offset.matchIdx = null;
this._pagesToSearch--;
if (offset.pageIdx >= numPages || offset.pageIdx < 0) {
offset.pageIdx = (previous ? numPages - 1 : 0);
offset.wrapped = true;
}
}
_updateMatch(found = false) {
let state = FindState.NOT_FOUND;
const wrapped = this._offset.wrapped;
this._offset.wrapped = false;
if (found) {
const previousPage = this._selected.pageIdx;
this._selected.pageIdx = this._offset.pageIdx;
this._selected.matchIdx = this._offset.matchIdx;
state = (wrapped ? FindState.WRAPPED : FindState.FOUND);
// Update the currently selected page to wipe out any selected matches.
if (previousPage !== -1 && previousPage !== this._selected.pageIdx) {
this._updatePage(previousPage);
}
}
this._updateUIState(state, this._state.findPrevious);
if (this._selected.pageIdx !== -1) {
this._updatePage(this._selected.pageIdx);
}
}
_onFindBarClose(evt) {
const pdfDocument = this._pdfDocument;
// Since searching is asynchronous, ensure that the removal of highlighted
// matches (from the UI) is async too such that the 'updatetextlayermatches'
// events will always be dispatched in the expected order.
this._firstPageCapability.promise.then(() => {
if (!this._pdfDocument ||
(pdfDocument && this._pdfDocument !== pdfDocument)) {
// Only update the UI if the document is open, and is the current one.
return;
}
if (this._findTimeout) {
clearTimeout(this._findTimeout);
this._findTimeout = null;
// Avoid the UI being in a pending state if the findbar is re-opened.
this._updateUIState(FindState.FOUND);
}
this._highlightMatches = false;
this._eventBus.dispatch('updatetextlayermatches', {
source: this,
pageIndex: -1,
});
});
}
_requestMatchesCount() {
const { pageIdx, matchIdx, } = this._selected;
let current = 0, total = this._matchesCountTotal;
if (matchIdx !== -1) {
for (let i = 0; i < pageIdx; i++) {
current += (this._pageMatches[i] && this._pageMatches[i].length) || 0;
}
current += matchIdx + 1;
}
// When searching starts, this method may be called before the `pageMatches`
// have been counted (in `_calculateMatch`). Ensure that the UI won't show
// temporarily broken state when the active find result doesn't make sense.
if (current < 1 || current > total) {
current = total = 0;
}
return { current, total, };
}
_updateUIResultsCount() {
this._eventBus.dispatch('updatefindmatchescount', {
source: this,
matchesCount: this._requestMatchesCount(),
});
}
_updateUIState(state, previous) {
this._eventBus.dispatch('updatefindcontrolstate', {
source: this,
state,
previous,
matchesCount: this._requestMatchesCount(),
});
}
}
export {
FindState,
PDFFindController,
};
| web/pdf_find_controller.js | /* Copyright 2012 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { createPromiseCapability } from 'pdfjs-lib';
import { getCharacterType } from './pdf_find_utils';
import { getGlobalEventBus } from './dom_events';
const FindState = {
FOUND: 0,
NOT_FOUND: 1,
WRAPPED: 2,
PENDING: 3,
};
const FIND_TIMEOUT = 250; // ms
const CHARACTERS_TO_NORMALIZE = {
'\u2018': '\'', // Left single quotation mark
'\u2019': '\'', // Right single quotation mark
'\u201A': '\'', // Single low-9 quotation mark
'\u201B': '\'', // Single high-reversed-9 quotation mark
'\u201C': '"', // Left double quotation mark
'\u201D': '"', // Right double quotation mark
'\u201E': '"', // Double low-9 quotation mark
'\u201F': '"', // Double high-reversed-9 quotation mark
'\u00BC': '1/4', // Vulgar fraction one quarter
'\u00BD': '1/2', // Vulgar fraction one half
'\u00BE': '3/4', // Vulgar fraction three quarters
};
let normalizationRegex = null;
function normalize(text) {
if (!normalizationRegex) {
// Compile the regular expression for text normalization once.
const replace = Object.keys(CHARACTERS_TO_NORMALIZE).join('');
normalizationRegex = new RegExp(`[${replace}]`, 'g');
}
return text.replace(normalizationRegex, function(ch) {
return CHARACTERS_TO_NORMALIZE[ch];
});
}
/**
* @typedef {Object} PDFFindControllerOptions
* @property {IPDFLinkService} linkService - The navigation/linking service.
* @property {EventBus} eventBus - The application event bus.
*/
/**
* Provides search functionality to find a given string in a PDF document.
*/
class PDFFindController {
/**
* @param {PDFFindControllerOptions} options
*/
constructor({ linkService, eventBus = getGlobalEventBus(), }) {
this._linkService = linkService;
this._eventBus = eventBus;
this._reset();
eventBus.on('findbarclose', this._onFindBarClose.bind(this));
}
get highlightMatches() {
return this._highlightMatches;
}
get pageMatches() {
return this._pageMatches;
}
get pageMatchesLength() {
return this._pageMatchesLength;
}
get selected() {
return this._selected;
}
get state() {
return this._state;
}
/**
* Set a reference to the PDF document in order to search it.
* Note that searching is not possible if this method is not called.
*
* @param {PDFDocumentProxy} pdfDocument - The PDF document to search.
*/
setDocument(pdfDocument) {
if (this._pdfDocument) {
this._reset();
}
if (!pdfDocument) {
return;
}
this._pdfDocument = pdfDocument;
this._firstPageCapability.resolve();
}
executeCommand(cmd, state) {
const pdfDocument = this._pdfDocument;
if (this._state === null || cmd !== 'findagain') {
this._dirtyMatch = true;
}
this._state = state;
this._updateUIState(FindState.PENDING);
this._firstPageCapability.promise.then(() => {
if (!this._pdfDocument ||
(pdfDocument && this._pdfDocument !== pdfDocument)) {
// If the document was closed before searching began, or if the search
// operation was relevant for a previously opened document, do nothing.
return;
}
this._extractText();
if (this._findTimeout) {
clearTimeout(this._findTimeout);
this._findTimeout = null;
}
if (cmd === 'find') {
// Trigger the find action with a small delay to avoid starting the
// search when the user is still typing (saving resources).
this._findTimeout = setTimeout(() => {
this._nextMatch();
this._findTimeout = null;
}, FIND_TIMEOUT);
} else {
this._nextMatch();
}
});
}
_reset() {
this._highlightMatches = false;
this._pdfDocument = null;
this._pageMatches = [];
this._pageMatchesLength = null;
this._state = null;
this._selected = { // Currently selected match.
pageIdx: -1,
matchIdx: -1,
};
this._offset = { // Where the find algorithm currently is in the document.
pageIdx: null,
matchIdx: null,
};
this._extractTextPromises = [];
this._pageContents = []; // Stores the normalized text for each page.
this._matchesCountTotal = 0;
this._pagesToSearch = null;
this._pendingFindMatches = Object.create(null);
this._resumePageIdx = null;
this._dirtyMatch = false;
clearTimeout(this._findTimeout);
this._findTimeout = null;
this._firstPageCapability = createPromiseCapability();
}
/**
* Helper for multi-term search that fills the `matchesWithLength` array
* and handles cases where one search term includes another search term (for
* example, "tamed tame" or "this is"). It looks for intersecting terms in
* the `matches` and keeps elements with a longer match length.
*/
_prepareMatches(matchesWithLength, matches, matchesLength) {
function isSubTerm(matchesWithLength, currentIndex) {
const currentElem = matchesWithLength[currentIndex];
const nextElem = matchesWithLength[currentIndex + 1];
// Check for cases like "TAMEd TAME".
if (currentIndex < matchesWithLength.length - 1 &&
currentElem.match === nextElem.match) {
currentElem.skipped = true;
return true;
}
// Check for cases like "thIS IS".
for (let i = currentIndex - 1; i >= 0; i--) {
const prevElem = matchesWithLength[i];
if (prevElem.skipped) {
continue;
}
if (prevElem.match + prevElem.matchLength < currentElem.match) {
break;
}
if (prevElem.match + prevElem.matchLength >=
currentElem.match + currentElem.matchLength) {
currentElem.skipped = true;
return true;
}
}
return false;
}
// Sort the array of `{ match: <match>, matchLength: <matchLength> }`
// objects on increasing index first and on the length otherwise.
matchesWithLength.sort(function(a, b) {
return a.match === b.match ? a.matchLength - b.matchLength :
a.match - b.match;
});
for (let i = 0, len = matchesWithLength.length; i < len; i++) {
if (isSubTerm(matchesWithLength, i)) {
continue;
}
matches.push(matchesWithLength[i].match);
matchesLength.push(matchesWithLength[i].matchLength);
}
}
/**
* Determine if the search query constitutes a "whole word", by comparing the
* first/last character type with the preceding/following character type.
*/
_isEntireWord(content, startIdx, length) {
if (startIdx > 0) {
const first = content.charCodeAt(startIdx);
const limit = content.charCodeAt(startIdx - 1);
if (getCharacterType(first) === getCharacterType(limit)) {
return false;
}
}
const endIdx = (startIdx + length - 1);
if (endIdx < (content.length - 1)) {
const last = content.charCodeAt(endIdx);
const limit = content.charCodeAt(endIdx + 1);
if (getCharacterType(last) === getCharacterType(limit)) {
return false;
}
}
return true;
}
_calculatePhraseMatch(query, pageIndex, pageContent, entireWord) {
const matches = [];
const queryLen = query.length;
let matchIdx = -queryLen;
while (true) {
matchIdx = pageContent.indexOf(query, matchIdx + queryLen);
if (matchIdx === -1) {
break;
}
if (entireWord && !this._isEntireWord(pageContent, matchIdx, queryLen)) {
continue;
}
matches.push(matchIdx);
}
this._pageMatches[pageIndex] = matches;
}
_calculateWordMatch(query, pageIndex, pageContent, entireWord) {
const matchesWithLength = [];
// Divide the query into pieces and search for text in each piece.
const queryArray = query.match(/\S+/g);
for (let i = 0, len = queryArray.length; i < len; i++) {
const subquery = queryArray[i];
const subqueryLen = subquery.length;
let matchIdx = -subqueryLen;
while (true) {
matchIdx = pageContent.indexOf(subquery, matchIdx + subqueryLen);
if (matchIdx === -1) {
break;
}
if (entireWord &&
!this._isEntireWord(pageContent, matchIdx, subqueryLen)) {
continue;
}
        // Store the match length as well, since subqueries can differ in length.
matchesWithLength.push({
match: matchIdx,
matchLength: subqueryLen,
skipped: false,
});
}
}
// Prepare arrays for storing the matches.
if (!this._pageMatchesLength) {
this._pageMatchesLength = [];
}
this._pageMatchesLength[pageIndex] = [];
this._pageMatches[pageIndex] = [];
// Sort `matchesWithLength`, remove intersecting terms and put the result
// into the two arrays.
this._prepareMatches(matchesWithLength, this._pageMatches[pageIndex],
this._pageMatchesLength[pageIndex]);
}
_calculateMatch(pageIndex) {
let pageContent = this._pageContents[pageIndex];
let query = normalize(this._state.query);
const { caseSensitive, entireWord, phraseSearch, } = this._state;
if (query.length === 0) {
// Do nothing: the matches should be wiped out already.
return;
}
if (!caseSensitive) {
pageContent = pageContent.toLowerCase();
query = query.toLowerCase();
}
if (phraseSearch) {
this._calculatePhraseMatch(query, pageIndex, pageContent, entireWord);
} else {
this._calculateWordMatch(query, pageIndex, pageContent, entireWord);
}
this._updatePage(pageIndex);
if (this._resumePageIdx === pageIndex) {
this._resumePageIdx = null;
this._nextPageMatch();
}
// Update the match count.
const pageMatchesCount = this._pageMatches[pageIndex].length;
if (pageMatchesCount > 0) {
this._matchesCountTotal += pageMatchesCount;
this._updateUIResultsCount();
}
}
_extractText() {
// Perform text extraction once if this method is called multiple times.
if (this._extractTextPromises.length > 0) {
return;
}
let promise = Promise.resolve();
for (let i = 0, ii = this._linkService.pagesCount; i < ii; i++) {
const extractTextCapability = createPromiseCapability();
this._extractTextPromises[i] = extractTextCapability.promise;
promise = promise.then(() => {
return this._pdfDocument.getPage(i + 1).then((pdfPage) => {
return pdfPage.getTextContent({
normalizeWhitespace: true,
});
}).then((textContent) => {
const textItems = textContent.items;
const strBuf = [];
for (let j = 0, jj = textItems.length; j < jj; j++) {
strBuf.push(textItems[j].str);
}
// Store the normalized page content (text items) as one string.
this._pageContents[i] = normalize(strBuf.join(''));
extractTextCapability.resolve(i);
}, (reason) => {
console.error(`Unable to get text content for page ${i + 1}`, reason);
// Page error -- assuming no text content.
this._pageContents[i] = '';
extractTextCapability.resolve(i);
});
});
}
}
_updatePage(index) {
if (this._selected.pageIdx === index) {
// If the page is selected, scroll the page into view, which triggers
// rendering the page, which adds the text layer. Once the text layer
// is built, it will scroll to the selected match.
this._linkService.page = index + 1;
}
this._eventBus.dispatch('updatetextlayermatches', {
source: this,
pageIndex: index,
});
}
_nextMatch() {
const previous = this._state.findPrevious;
const currentPageIndex = this._linkService.page - 1;
const numPages = this._linkService.pagesCount;
this._highlightMatches = true;
if (this._dirtyMatch) {
// Need to recalculate the matches, reset everything.
this._dirtyMatch = false;
this._selected.pageIdx = this._selected.matchIdx = -1;
this._offset.pageIdx = currentPageIndex;
this._offset.matchIdx = null;
this._resumePageIdx = null;
this._pageMatches.length = 0;
this._pageMatchesLength = null;
this._matchesCountTotal = 0;
for (let i = 0; i < numPages; i++) {
// Wipe out any previously highlighted matches.
this._updatePage(i);
// Start finding the matches as soon as the text is extracted.
if (!(i in this._pendingFindMatches)) {
this._pendingFindMatches[i] = true;
this._extractTextPromises[i].then((pageIdx) => {
delete this._pendingFindMatches[pageIdx];
this._calculateMatch(pageIdx);
});
}
}
}
// If there's no query there's no point in searching.
if (this._state.query === '') {
this._updateUIState(FindState.FOUND);
return;
}
// If we're waiting on a page, we return since we can't do anything else.
if (this._resumePageIdx) {
return;
}
const offset = this._offset;
// Keep track of how many pages we should maximally iterate through.
this._pagesToSearch = numPages;
// If there's already a `matchIdx` that means we are iterating through a
// page's matches.
if (offset.matchIdx !== null) {
const numPageMatches = this._pageMatches[offset.pageIdx].length;
if ((!previous && offset.matchIdx + 1 < numPageMatches) ||
(previous && offset.matchIdx > 0)) {
        // The simple case; we just have to advance the matchIdx to select
// the next match on the page.
offset.matchIdx = (previous ? offset.matchIdx - 1 :
offset.matchIdx + 1);
this._updateMatch(/* found = */ true);
return;
}
// We went beyond the current page's matches, so we advance to
// the next page.
this._advanceOffsetPage(previous);
}
// Start searching through the page.
this._nextPageMatch();
}
_matchesReady(matches) {
const offset = this._offset;
const numMatches = matches.length;
const previous = this._state.findPrevious;
if (numMatches) {
// There were matches for the page, so initialize `matchIdx`.
offset.matchIdx = (previous ? numMatches - 1 : 0);
this._updateMatch(/* found = */ true);
return true;
}
// No matches, so attempt to search the next page.
this._advanceOffsetPage(previous);
if (offset.wrapped) {
offset.matchIdx = null;
if (this._pagesToSearch < 0) {
// No point in wrapping again, there were no matches.
this._updateMatch(/* found = */ false);
// While matches were not found, searching for a page
// with matches should nevertheless halt.
return true;
}
}
// Matches were not found (and searching is not done).
return false;
}
_nextPageMatch() {
if (this._resumePageIdx !== null) {
console.error('There can only be one pending page.');
}
let matches = null;
do {
const pageIdx = this._offset.pageIdx;
matches = this._pageMatches[pageIdx];
if (!matches) {
// The matches don't exist yet for processing by `_matchesReady`,
// so set a resume point for when they do exist.
this._resumePageIdx = pageIdx;
break;
}
} while (!this._matchesReady(matches));
}
_advanceOffsetPage(previous) {
const offset = this._offset;
const numPages = this._linkService.pagesCount;
offset.pageIdx = (previous ? offset.pageIdx - 1 : offset.pageIdx + 1);
offset.matchIdx = null;
this._pagesToSearch--;
if (offset.pageIdx >= numPages || offset.pageIdx < 0) {
offset.pageIdx = (previous ? numPages - 1 : 0);
offset.wrapped = true;
}
}
_updateMatch(found = false) {
let state = FindState.NOT_FOUND;
const wrapped = this._offset.wrapped;
this._offset.wrapped = false;
if (found) {
const previousPage = this._selected.pageIdx;
this._selected.pageIdx = this._offset.pageIdx;
this._selected.matchIdx = this._offset.matchIdx;
state = (wrapped ? FindState.WRAPPED : FindState.FOUND);
// Update the currently selected page to wipe out any selected matches.
if (previousPage !== -1 && previousPage !== this._selected.pageIdx) {
this._updatePage(previousPage);
}
}
this._updateUIState(state, this._state.findPrevious);
if (this._selected.pageIdx !== -1) {
this._updatePage(this._selected.pageIdx);
}
}
_onFindBarClose(evt) {
const pdfDocument = this._pdfDocument;
// Since searching is asynchronous, ensure that the removal of highlighted
// matches (from the UI) is async too such that the 'updatetextlayermatches'
// events will always be dispatched in the expected order.
this._firstPageCapability.promise.then(() => {
if (!this._pdfDocument ||
(pdfDocument && this._pdfDocument !== pdfDocument)) {
// Only update the UI if the document is open, and is the current one.
return;
}
if (this._findTimeout) {
clearTimeout(this._findTimeout);
this._findTimeout = null;
// Avoid the UI being in a pending state if the findbar is re-opened.
this._updateUIState(FindState.FOUND);
}
this._highlightMatches = false;
this._eventBus.dispatch('updatetextlayermatches', {
source: this,
pageIndex: -1,
});
});
}
_requestMatchesCount() {
const { pageIdx, matchIdx, } = this._selected;
let current = 0, total = this._matchesCountTotal;
if (matchIdx !== -1) {
for (let i = 0; i < pageIdx; i++) {
current += (this._pageMatches[i] && this._pageMatches[i].length) || 0;
}
current += matchIdx + 1;
}
// When searching starts, this method may be called before the `pageMatches`
// have been counted (in `_calculateMatch`). Ensure that the UI won't show
// temporarily broken state when the active find result doesn't make sense.
if (current < 1 || current > total) {
current = total = 0;
}
return { current, total, };
}
_updateUIResultsCount() {
this._eventBus.dispatch('updatefindmatchescount', {
source: this,
matchesCount: this._requestMatchesCount(),
});
}
_updateUIState(state, previous) {
this._eventBus.dispatch('updatefindcontrolstate', {
source: this,
state,
previous,
matchesCount: this._requestMatchesCount(),
});
}
}
export {
FindState,
PDFFindController,
};
| Only normalize the search query once, in `PDFFindController`, for every page being searched
For a short document, such as e.g. the `tracemonkey` file, this repeated normalization won't matter much, but for documents with a couple of thousand pages it seems completely unnecessary (and wasteful) to keep repeating the normalization for every single page.
| web/pdf_find_controller.js | Only normalize the search query once, in `PDFFindController`, for every page being searched | <ide><path>eb/pdf_find_controller.js
<ide> }
<ide>
<ide> /**
<add> * @return {string} The (current) normalized search query.
<add> */
<add> get _query() {
<add> if (this._state.query !== this._rawQuery) {
<add> this._rawQuery = this._state.query;
<add> this._normalizedQuery = normalize(this._state.query);
<add> }
<add> return this._normalizedQuery;
<add> }
<add>
<add> /**
<ide> * Helper for multi-term search that fills the `matchesWithLength` array
<ide> * and handles cases where one search term includes another search term (for
<ide> * example, "tamed tame" or "this is"). It looks for intersecting terms in
<ide>
<ide> _calculateMatch(pageIndex) {
<ide> let pageContent = this._pageContents[pageIndex];
<del> let query = normalize(this._state.query);
<add> let query = this._query;
<ide> const { caseSensitive, entireWord, phraseSearch, } = this._state;
<ide>
<ide> if (query.length === 0) {
<ide> }
<ide>
<ide> // If there's no query there's no point in searching.
<del> if (this._state.query === '') {
<add> if (this._query === '') {
<ide> this._updateUIState(FindState.FOUND);
<ide> return;
<ide> } |
|
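The optimization described in the pdf.js commit above boils down to a memoized getter: keep the normalized query cached next to the raw query it was derived from, and recompute only when the raw query actually changes, so that searching N pages triggers a single normalization. A standalone sketch of the same pattern with a toy normalization table (the class name and call counter are illustrative, not the pdf.js API):

let normalizeCalls = 0; // counter, only to demonstrate the caching

const CHARACTERS_TO_NORMALIZE = { '\u2018': '\'', '\u2019': '\'', };
const normalizationRegex =
  new RegExp(`[${Object.keys(CHARACTERS_TO_NORMALIZE).join('')}]`, 'g');

function normalize(text) {
  normalizeCalls++;
  return text.replace(normalizationRegex, (ch) => CHARACTERS_TO_NORMALIZE[ch]);
}

class TinyFindController {
  constructor() {
    this._state = { query: '', };
    this._rawQuery = null; // raw query the cache was built from
    this._normalizedQuery = null; // cached normalized form
  }

  get _query() {
    if (this._state.query !== this._rawQuery) {
      this._rawQuery = this._state.query;
      this._normalizedQuery = normalize(this._state.query);
    }
    return this._normalizedQuery;
  }
}

const fc = new TinyFindController();
fc._state.query = '\u2018find me\u2019';
for (let page = 0; page < 2000; page++) {
  fc._query; // normalize() runs once, not 2000 times
}
console.log(normalizeCalls); // logs: 1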
Java | apache-2.0 | c80883b8610c22dbe1a1c301df2485019e72fc8c | 0 | rajubairishetti/pintail,InMobi/pintail,rajubairishetti/pintail,sreedishps/pintail,InMobi/pintail,sreedishps/pintail | package com.inmobi.messaging.consumer.databus;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Date;
import java.util.concurrent.BlockingQueue;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import com.inmobi.databus.Cluster;
import com.inmobi.messaging.Message;
class PartitionReader {
private static final Log LOG = LogFactory.getLog(PartitionReader.class);
private final PartitionId partitionId;
private final String streamName;
private final PartitionCheckpoint partitionCheckpoint;
private final BlockingQueue<QueueEntry> buffer;
private Date startTime;
private final Path collectorDir;
private Thread thread;
private volatile boolean stopped;
private LocalStreamReader lReader;
private CollectorStreamReader cReader;
private StreamReader currentReader;
private boolean inited = false;
private final long waitTimeForBufferFull;
PartitionReader(PartitionId partitionId,
PartitionCheckpoint partitionCheckpoint, Cluster cluster,
BlockingQueue<QueueEntry> buffer, String streamName,
Date startTime, long waitTimeForFlush, long waitTimeForBufferFull) {
this.partitionId = partitionId;
this.buffer = buffer;
this.startTime = startTime;
this.streamName = streamName;
this.partitionCheckpoint = partitionCheckpoint;
this.waitTimeForBufferFull = waitTimeForBufferFull;
// initialize cluster and its directories
Path streamDir = new Path(cluster.getDataDir(), streamName);
this.collectorDir = new Path(streamDir, partitionId.getCollector());
try {
lReader = new LocalStreamReader(partitionId, cluster, streamName);
cReader = new CollectorStreamReader(partitionId, cluster, streamName,
waitTimeForFlush);
} catch (Exception e) {
throw new RuntimeException(e);
}
LOG.info("Partition reader initialized with partitionId:" + partitionId +
" checkPoint:" + partitionCheckpoint +
" collectorDir:" + collectorDir +
" startTime:" + startTime +
" currentReader:" + currentReader);
}
public synchronized void start() {
Runnable runnable = new Runnable() {
@Override
public void run() {
while (!stopped && !thread.isInterrupted()) {
long startTime = System.currentTimeMillis();
try {
while (!inited) {
initializeCurrentFile();
}
LOG.info("Started streaming the data from reader:" + currentReader);
execute();
if (stopped || thread.isInterrupted())
return;
} catch (Exception e) {
LOG.warn("Error in run", e);
}
long finishTime = System.currentTimeMillis();
LOG.debug("Execution took ms : " + (finishTime - startTime));
try {
long sleep = 1000;
if (sleep > 0) {
LOG.debug("Sleeping for " + sleep);
Thread.sleep(sleep);
}
} catch (InterruptedException e) {
LOG.warn("thread interrupted " + thread.getName(), e);
return;
}
}
}
};
thread = new Thread(runnable, this.partitionId.toString());
LOG.info("Starting thread " + thread.getName());
thread.start();
}
public void close() {
stopped = true;
LOG.info(Thread.currentThread().getName() + " stopped [" + stopped + "]");
if (currentReader != null) {
try {
currentReader.close();
} catch (IOException e) {
LOG.warn("Error closing current stream", e);
}
}
}
private void initializeCurrentFileFromTimeStamp(Date timestamp)
throws Exception {
if (startTime != null) {
if (lReader.initializeCurrentFile(timestamp)) {
currentReader = lReader;
} else if (cReader.initializeCurrentFile(startTime)) {
currentReader = cReader;
} else {
currentReader = null;
}
}
}
private void initializeCurrentFileFromCheckpoint() throws Exception {
String fileName = partitionCheckpoint.getFileName();
if (cReader.isCollectorFile(fileName)) {
if (cReader.initializeCurrentFile(partitionCheckpoint)) {
currentReader = cReader;
} else {
String localStreamFileName =
LocalStreamReader.getLocalStreamFileName(
partitionId.getCollector(), fileName);
if (lReader.initializeCurrentFile(new PartitionCheckpoint(
localStreamFileName, partitionCheckpoint.getLineNum()))) {
currentReader = lReader;
} else {
currentReader = null;
}
}
} else if (lReader.isLocalStreamFile(fileName)) {
LOG.debug("Checkpointed file is in local stream directory");
if (lReader.initializeCurrentFile(partitionCheckpoint)) {
currentReader = lReader;
} else {
currentReader = null;
}
} else {
currentReader = null;
}
}
private void initFromStart() throws Exception {
if (lReader.initFromStart()) {
currentReader = lReader;
} else if (cReader.initFromStart()) {
currentReader = cReader;
} else {
LOG.warn("No files to start");
currentReader = null;
}
}
void initializeCurrentFile() throws Exception {
if (!inited) {
LOG.info("Initializing partition reader's current file");
lReader.build(LocalStreamReader.getBuildTimestamp(startTime, streamName,
partitionId.getCollector(), partitionCheckpoint));
cReader.build();
if (startTime != null) {
initializeCurrentFileFromTimeStamp(startTime);
} else if (partitionCheckpoint != null &&
partitionCheckpoint.getFileName() != null) {
initializeCurrentFileFromCheckpoint();
} else {
initFromStart();
}
if (currentReader != null) {
LOG.info("Intialized currentFile:" + currentReader.getCurrentFile() +
" currentLineNum:" + currentReader.getCurrentLineNum());
}
inited = true;
}
}
Path getCurrentFile() {
if (currentReader != null) {
return currentReader.getCurrentFile();
}
return null;
}
StreamReader getCurrentReader() {
return currentReader;
}
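  /**
   * Reads lines from the current reader and puts them on the buffer, blocking
   * when the buffer is full. On end of stream it switches between the local
   * and collector readers before giving up.
   */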
protected void execute() {
if (currentReader == null) {
LOG.info("There is no stream reader, exiting");
return;
}
try {
currentReader.openStream();
LOG.info("Reading file " + currentReader.getCurrentFile() +
" and lineNum:" + currentReader.getCurrentLineNum());
while (!stopped) {
String line = currentReader.readLine();
if (line != null) {
// add the data to queue
byte[] data = Base64.decodeBase64(line);
LOG.debug("Current LineNum: " + currentReader.getCurrentLineNum());
buffer.put(new QueueEntry(new Message(
ByteBuffer.wrap(data)), partitionId,
new PartitionCheckpoint(currentReader.getCurrentFile().getName(),
currentReader.getCurrentLineNum())));
} else {
if (currentReader == lReader) {
lReader.close();
LOG.info("Switching to collector stream as we reached end of" +
" stream on local stream");
cReader.build();
if (cReader.initFromStart()) {
currentReader = cReader;
} else {
LOG.warn("No stream to read");
currentReader.close();
currentReader = null;
}
} else if (currentReader == cReader) {
cReader.close();
LOG.info("Looking for current file in local stream reader");
lReader.build(CollectorStreamReader.getDateFromCollectorFile(
currentReader.getCurrentFile().getName()));
if (!lReader.setCurrentFile(
LocalStreamReader.getLocalStreamFileName(
partitionId.getCollector(),
cReader.getCurrentFile().getName()),
cReader.getCurrentLineNum())) {
LOG.info("Did not find current file in local stream as well.");
currentReader.close();
currentReader = null;
} else {
LOG.info("Switching to local stream as the file got moved");
currentReader = lReader;
}
}
return;
}
}
} catch (Exception e) {
LOG.warn("Error while reading stream", e);
} finally {
try {
if (currentReader != null) {
currentReader.close();
}
} catch (Exception e) {
LOG.warn("Error while closing stream", e);
}
}
}
}
| messaging-client-databus/src/main/java/com/inmobi/messaging/consumer/databus/PartitionReader.java | package com.inmobi.messaging.consumer.databus;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Date;
import java.util.concurrent.BlockingQueue;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import com.inmobi.databus.Cluster;
import com.inmobi.messaging.Message;
class PartitionReader {
private static final Log LOG = LogFactory.getLog(PartitionReader.class);
private final PartitionId partitionId;
private final String streamName;
private final PartitionCheckpoint partitionCheckpoint;
private final BlockingQueue<QueueEntry> buffer;
private Date startTime;
private final Path collectorDir;
private Thread thread;
private volatile boolean stopped;
private LocalStreamReader lReader;
private CollectorStreamReader cReader;
private StreamReader currentReader;
private boolean inited = false;
private final long waitTimeForBufferFull;
PartitionReader(PartitionId partitionId,
PartitionCheckpoint partitionCheckpoint, Cluster cluster,
BlockingQueue<QueueEntry> buffer, String streamName,
Date startTime, long waitTimeForFlush, long waitTimeForBufferFull) {
this.partitionId = partitionId;
this.buffer = buffer;
this.startTime = startTime;
this.streamName = streamName;
this.partitionCheckpoint = partitionCheckpoint;
this.waitTimeForBufferFull = waitTimeForBufferFull;
// initialize cluster and its directories
Path streamDir = new Path(cluster.getDataDir(), streamName);
this.collectorDir = new Path(streamDir, partitionId.getCollector());
try {
lReader = new LocalStreamReader(partitionId, cluster, streamName);
cReader = new CollectorStreamReader(partitionId, cluster, streamName,
waitTimeForFlush);
} catch (Exception e) {
throw new RuntimeException(e);
}
LOG.info("Partition reader initialized with partitionId:" + partitionId +
" checkPoint:" + partitionCheckpoint +
" collectorDir:" + collectorDir +
" startTime:" + startTime +
" currentReader:" + currentReader);
}
public synchronized void start() {
Runnable runnable = new Runnable() {
@Override
public void run() {
while (!stopped && !thread.isInterrupted()) {
long startTime = System.currentTimeMillis();
try {
while (!inited) {
initializeCurrentFile();
}
LOG.info("Started streaming the data from reader:" + currentReader);
execute();
if (stopped || thread.isInterrupted())
return;
} catch (Exception e) {
LOG.warn("Error in run", e);
}
long finishTime = System.currentTimeMillis();
LOG.debug("Execution took ms : " + (finishTime - startTime));
try {
long sleep = 1000;
if (sleep > 0) {
LOG.debug("Sleeping for " + sleep);
Thread.sleep(sleep);
}
} catch (InterruptedException e) {
LOG.warn("thread interrupted " + thread.getName(), e);
return;
}
}
}
};
thread = new Thread(runnable, this.partitionId.toString());
LOG.info("Starting thread " + thread.getName());
thread.start();
}
public void close() {
stopped = true;
LOG.info(Thread.currentThread().getName() + " stopped [" + stopped + "]");
if (currentReader != null) {
try {
currentReader.close();
} catch (IOException e) {
LOG.warn("Error closing current stream", e);
}
}
}
private void initializeCurrentFileFromTimeStamp(Date timestamp)
throws Exception {
if (startTime != null) {
if (lReader.initializeCurrentFile(timestamp)) {
currentReader = lReader;
} else if (cReader.initializeCurrentFile(startTime)) {
currentReader = cReader;
} else {
currentReader = null;
}
}
}
private void initializeCurrentFileFromCheckpoint() throws Exception {
String fileName = partitionCheckpoint.getFileName();
if (cReader.isCollectorFile(fileName)) {
if (cReader.initializeCurrentFile(partitionCheckpoint)) {
currentReader = cReader;
} else {
String localStreamFileName =
LocalStreamReader.getLocalStreamFileName(
partitionId.getCollector(), fileName);
if (lReader.initializeCurrentFile(new PartitionCheckpoint(
localStreamFileName, partitionCheckpoint.getLineNum()))) {
currentReader = lReader;
} else {
currentReader = null;
}
}
} else if (lReader.isLocalStreamFile(fileName)) {
LOG.debug("Checkpointed file is in local stream directory");
if (lReader.initializeCurrentFile(partitionCheckpoint)) {
currentReader = lReader;
} else {
currentReader = null;
}
} else {
currentReader = null;
}
}
private void initFromStart() throws Exception {
if (lReader.initFromStart()) {
currentReader = lReader;
} else if (cReader.initFromStart()) {
currentReader = cReader;
} else {
LOG.warn("No files to start");
currentReader = null;
}
}
void initializeCurrentFile() throws Exception {
if (!inited) {
LOG.info("Initializing partition reader's current file");
lReader.build(LocalStreamReader.getBuildTimestamp(startTime, streamName,
partitionId.getCollector(), partitionCheckpoint));
cReader.build();
if (startTime != null) {
initializeCurrentFileFromTimeStamp(startTime);
} else if (partitionCheckpoint != null &&
partitionCheckpoint.getFileName() != null) {
initializeCurrentFileFromCheckpoint();
} else {
initFromStart();
}
if (currentReader != null) {
LOG.info("Intialized currentFile:" + currentReader.getCurrentFile() +
" currentLineNum:" + currentReader.getCurrentLineNum());
}
inited = true;
}
}
Path getCurrentFile() {
if (currentReader != null) {
return currentReader.getCurrentFile();
}
return null;
}
StreamReader getCurrentReader() {
return currentReader;
}
protected void execute() {
if (currentReader == null) {
LOG.info("There is no stream reader, exiting");
return;
}
try {
currentReader.openStream();
LOG.info("Reading file " + currentReader.getCurrentFile() +
" and lineNum:" + currentReader.getCurrentLineNum());
while (!stopped) {
String line = currentReader.readLine();
if (line != null) {
while (buffer.remainingCapacity() == 0) {
LOG.debug("Waiting for space in buffer");
if (stopped) {
return;
}
Thread.sleep(waitTimeForBufferFull);
}
// add the data to queue
byte[] data = Base64.decodeBase64(line);
LOG.debug("Current LineNum: " + currentReader.getCurrentLineNum());
while (!buffer.offer(new QueueEntry(new Message(
ByteBuffer.wrap(data)), partitionId,
new PartitionCheckpoint(currentReader.getCurrentFile().getName(),
currentReader.getCurrentLineNum())))) {
Thread.sleep(waitTimeForBufferFull);
if (stopped) {
return;
}
LOG.warn("Could not add entry as buffer is full, retrying to add");
}
}
if (line == null) {
if (currentReader == lReader) {
lReader.close();
LOG.info("Switching to collector stream as we reached end of" +
" stream on local stream");
cReader.build();
if (cReader.initFromStart()) {
currentReader = cReader;
} else {
LOG.warn("No stream to read");
currentReader.close();
currentReader = null;
}
} else if (currentReader == cReader) {
cReader.close();
LOG.info("Looking for current file in local stream reader");
lReader.build(CollectorStreamReader.getDateFromCollectorFile(
currentReader.getCurrentFile().getName()));
if (!lReader.setCurrentFile(
LocalStreamReader.getLocalStreamFileName(
partitionId.getCollector(),
cReader.getCurrentFile().getName()),
cReader.getCurrentLineNum())) {
LOG.info("Did not find current file in local stream as well.");
currentReader.close();
currentReader = null;
} else {
LOG.info("Switching to local stream as the file got moved");
currentReader = lReader;
}
}
return;
}
}
} catch (Exception e) {
LOG.warn("Error while reading stream", e);
} finally {
try {
if (currentReader != null) {
currentReader.close();
}
} catch (Exception e) {
LOG.warn("Error while closing stream", e);
}
}
}
}
| Change PartitionReader to call buffer.put instead of offer
| messaging-client-databus/src/main/java/com/inmobi/messaging/consumer/databus/PartitionReader.java | Change PartitionReader to call buffer.put instead of offer | <ide><path>essaging-client-databus/src/main/java/com/inmobi/messaging/consumer/databus/PartitionReader.java
<ide> while (!stopped) {
<ide> String line = currentReader.readLine();
<ide> if (line != null) {
<del> while (buffer.remainingCapacity() == 0) {
<del> LOG.debug("Waiting for space in buffer");
<del> if (stopped) {
<del> return;
<del> }
<del> Thread.sleep(waitTimeForBufferFull);
<del> }
<ide> // add the data to queue
<ide> byte[] data = Base64.decodeBase64(line);
<ide> LOG.debug("Current LineNum: " + currentReader.getCurrentLineNum());
<del> while (!buffer.offer(new QueueEntry(new Message(
<add> buffer.put(new QueueEntry(new Message(
<ide> ByteBuffer.wrap(data)), partitionId,
<ide> new PartitionCheckpoint(currentReader.getCurrentFile().getName(),
<del> currentReader.getCurrentLineNum())))) {
<del> Thread.sleep(waitTimeForBufferFull);
<del> if (stopped) {
<del> return;
<del> }
<del> LOG.warn("Could not add entry as buffer is full, retrying to add");
<del> }
<del> }
<del> if (line == null) {
<add> currentReader.getCurrentLineNum())));
<add> } else {
<ide> if (currentReader == lReader) {
<ide> lReader.close();
<ide> LOG.info("Switching to collector stream as we reached end of" + |
|
Java | apache-2.0 | ea4b2bc8aa17711d49a8df420b74b16561b4e844 | 0 | almende/dialog,almende/dialog,almende/dialog | package com.almende.dialog.adapter;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.logging.Logger;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import com.almende.dialog.LogLevel;
import com.almende.dialog.Settings;
import com.almende.dialog.accounts.AdapterConfig;
import com.almende.dialog.agent.AdapterAgent;
import com.almende.dialog.model.Answer;
import com.almende.dialog.model.MediaProperty.MediaPropertyKey;
import com.almende.dialog.model.MediaProperty.MediumType;
import com.almende.dialog.model.Question;
import com.almende.dialog.model.Session;
import com.almende.dialog.model.ddr.DDRRecord;
import com.almende.dialog.util.DDRUtils;
import com.almende.dialog.util.ServerUtils;
import com.almende.dialog.util.TimeUtils;
import com.askfast.commons.utils.PhoneNumberUtils;
import com.google.i18n.phonenumbers.PhoneNumberUtil.PhoneNumberFormat;
import com.twilio.sdk.TwilioRestClient;
import com.twilio.sdk.resource.factory.CallFactory;
import com.twilio.sdk.resource.instance.Account;
import com.twilio.sdk.resource.instance.Call;
import com.twilio.sdk.verbs.Dial;
import com.twilio.sdk.verbs.Gather;
import com.twilio.sdk.verbs.Hangup;
import com.twilio.sdk.verbs.Play;
import com.twilio.sdk.verbs.Record;
import com.twilio.sdk.verbs.Redirect;
import com.twilio.sdk.verbs.Say;
import com.twilio.sdk.verbs.TwiMLException;
import com.twilio.sdk.verbs.TwiMLResponse;
import com.twilio.sdk.verbs.Verb;
@Path("twilio")
public class TwilioAdapter {
protected static final Logger log = Logger.getLogger(VoiceXMLRESTProxy.class.getName());
protected static final com.almende.dialog.Logger dialogLog = new com.almende.dialog.Logger();
private static final int LOOP_DETECTION=10;
protected String TIMEOUT_URL="timeout";
//protected String EXCEPTION_URL="exception";
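    /**
     * Initiates outbound calls through the Twilio REST API: one call and one
     * fresh session per (formatted) address in the map. Returns a map of
     * formatted address to session key, or to an error string for invalid
     * addresses.
     */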
public static HashMap<String, String> dial(Map<String, String> addressNameMap, String url, String senderName,
AdapterConfig config, String applicationId) throws Exception {
HashMap<String, String> resultSessionMap = new HashMap<String, String>();
// If it is a broadcast don't provide the remote address because it is deceiving.
String loadAddress = "";
if (addressNameMap.size() == 1)
loadAddress = addressNameMap.keySet().iterator().next();
//fetch the question
Question question = Question.fromURL(url, config.getConfigId(), loadAddress, config.getMyAddress(), null, null);
for (String address : addressNameMap.keySet()) {
String formattedAddress = PhoneNumberUtils.formatNumber(address, PhoneNumberFormat.E164);
if (formattedAddress != null) {
//avoid multiple calls to be made to the same number, from the same adapter.
Session session = Session.getSession(Session.getSessionKey(config, formattedAddress));
if (session != null) {
// recreate a fresh session
session.drop();
session = Session.getOrCreateSession(config, formattedAddress);
}
else {
session = Session.getOrCreateSession(config, formattedAddress);
}
session.killed = false;
session.setStartUrl(url);
session.setDirection("outbound");
session.setRemoteAddress(formattedAddress);
session.setType(AdapterAgent.ADAPTER_TYPE_TWILIO);
session.setAdapterID(config.getConfigId());
session.setQuestion(question);
dialogLog.log(LogLevel.INFO, session.getAdapterConfig(), String.format("Outgoing call requested from: %s to: %s",
session.getLocalAddress(),
formattedAddress), session);
String extSession = "";
if (!ServerUtils.isInUnitTestingEnvironment()) {
String accountSid = config.getAccessToken();
String authToken = config.getAccessTokenSecret();
TwilioRestClient twilio = new TwilioRestClient(accountSid, authToken);
// Get the main account (The one we used to authenticate the client)
Account mainAccount = twilio.getAccount();
// Make a call
CallFactory callFactory = mainAccount.getCallFactory();
Map<String, String> callParams = new HashMap<String, String>();
callParams.put("To", formattedAddress); // Replace with a valid phone number
callParams.put("From", config.getMyAddress()); // Replace with a valid phone
// number in your account
callParams.put("ApplicationSid", applicationId);
//callParams.put("Url", "http://" + Settings.HOST + "/dialoghandler/rest/twilio/new");
callParams.put("StatusCallback", "http://" + Settings.HOST + "/dialoghandler/rest/twilio/cc");
callParams.put("StatusCallbackMethod", "GET");
callParams.put("IfMachine", "Hangup");
callParams.put("Record", "false");
Call call = callFactory.create(callParams);
                log.info("Twilio call created with sid: " + call.getSid());
extSession = call.getSid();
}
session.setExternalSession(extSession);
session.storeSession();
resultSessionMap.put(formattedAddress, session.getKey());
}
else {
resultSessionMap.put(address, "Invalid address");
log.severe(String.format("To address is invalid: %s. Ignoring.. ", address));
}
}
return resultSessionMap;
}
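    /**
     * Callback invoked by Twilio when a call is connected (GET variant).
     * Resolves the session and dialog url, creates a ddr record for the call
     * and renders the first question as TwiML.
     */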
@Path("new")
@GET
@Produces("application/xml")
public Response getNewDialog(@QueryParam("CallSid") String CallSid,
@QueryParam("AccountSid") String AccountSid,
@QueryParam("From") String localID,
@QueryParam("To") String remoteID,
@QueryParam("Direction") String direction) {
log.info("call started:"+direction+":"+remoteID+":"+localID);
if (direction.equals("inbound")) {
String tmpLocalId = new String(localID);
localID = new String(remoteID);
remoteID = tmpLocalId;
}
AdapterConfig config = AdapterConfig.findAdapterConfig(AdapterAgent.ADAPTER_TYPE_TWILIO, localID);
String formattedRemoteId = PhoneNumberUtils.formatNumber(remoteID, null);
String sessionKey = AdapterAgent.ADAPTER_TYPE_TWILIO+"|"+localID+"|"+ formattedRemoteId;
Session session = Session.getSession(sessionKey);
String url = "";
if ( session != null && direction.startsWith("outbound")) {
url = session.getStartUrl();
dialogLog.log(LogLevel.INFO, config, String
.format("Trying to fetch dialog for %s, due to outgoing Call from: %s ",
formattedRemoteId, config.getMyAddress()), session);
}
else if(direction.equals("inbound")) {
//create a session for incoming only
session = Session.getSession(Session.getSessionKey(config, formattedRemoteId));
if(session != null) {
session.drop();
}
            session = Session.getOrCreateSession(config, formattedRemoteId);
            url = config.getURLForInboundScenario();
}
if(session != null) {
session.setStartUrl( url );
session.setDirection( direction );
session.setRemoteAddress( formattedRemoteId );
session.setType( AdapterAgent.ADAPTER_TYPE_TWILIO );
session.setAccountId( config.getOwner() );
session.setAdapterID( config.getConfigId() );
}
else {
log.severe(String.format("Session %s not found", sessionKey));
return null;
}
Question question = session.getQuestion();
if(question == null) {
question = Question.fromURL(url, session.getAdapterConfig().getConfigId(), formattedRemoteId, localID,
session.getDdrRecordId(), session.getKey());
}
session.setQuestion(question);
if (session.getQuestion() != null) {
//create ddr record
DDRRecord ddrRecord = null;
try {
if (direction.contains("outbound")) {
ddrRecord = DDRUtils.createDDRRecordOnOutgoingCommunication(config, formattedRemoteId, 1, url);
}
else {
ddrRecord = DDRUtils.createDDRRecordOnIncomingCommunication(config, formattedRemoteId, 1, url);
}
session.setDdrRecordId( ddrRecord != null ? ddrRecord.getId() : null);
ddrRecord.addAdditionalInfo(Session.TRACKING_TOKEN_KEY, session.getTrackingToken());
}
catch (Exception e) {
String errorMessage = String.format("Creating DDR records failed. Direction: %s for adapterId: %s with address: %s remoteId: %s and localId: %s",
direction, config.getConfigId(), config.getMyAddress(), formattedRemoteId,
localID);
log.severe(errorMessage);
dialogLog.severe(config.getConfigId(), errorMessage, ddrRecord != null ? ddrRecord.getId() : null,
sessionKey);
}
finally {
                if (ddrRecord != null) {
                    ddrRecord.createOrUpdate();
                }
session.storeSession();
}
return handleQuestion( question, config, formattedRemoteId, sessionKey );
}
else {
return Response.ok().build();
}
}
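    /**
     * POST variant of the new-call callback; resolves the dialog url (the
     * adapter's inbound scenario url for incoming calls) and renders the
     * first question as TwiML.
     */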
@Path("new")
@POST
@Produces("application/xml")
public Response getNewDialogPost(@FormParam("CallSid") String CallSid,
@FormParam("AccountSid") String AccountSid,
@FormParam("From") String localID,
@FormParam("To") String remoteID,
@FormParam("Direction") String direction) {
log.info("call started:"+direction+":"+remoteID+":"+localID);
        //swap the remote and the local numbers if it's inbound
if(direction.equals("inbound")) {
String tmpLocalId = new String(localID);
localID = new String(remoteID);
remoteID = tmpLocalId;
}
AdapterConfig config = AdapterConfig.findAdapterConfig(AdapterAgent.ADAPTER_TYPE_TWILIO, localID);
String formattedRemoteId = PhoneNumberUtils.formatNumber(remoteID, null);
String sessionKey = AdapterAgent.ADAPTER_TYPE_TWILIO+"|"+localID+"|"+ formattedRemoteId;
Session session = Session.getSession(sessionKey);
String url = "";
if ( session != null && direction.startsWith("outbound")) {
url = session.getStartUrl();
dialogLog.log(LogLevel.INFO, config, String
.format("Trying to fetch dialog for %s, due to outgoing Call from: %s ",
formattedRemoteId, config.getMyAddress()), session);
}
else if(direction.equals("inbound")) {
//create a session for incoming only
session = Session.getSession(Session.getSessionKey(config, formattedRemoteId));
if(session != null) {
session.drop();
}
session = Session.getOrCreateSession(config, formattedRemoteId);
url = config.getURLForInboundScenario();
}
if(session != null) {
session.setStartUrl( url );
session.setDirection( direction );
session.setRemoteAddress( formattedRemoteId );
session.setType( AdapterAgent.ADAPTER_TYPE_TWILIO );
session.setAccountId( config.getOwner() );
session.setAdapterID( config.getConfigId() );
}
else {
log.severe(String.format("Session %s not found", sessionKey));
return null;
}
Question question = session.getQuestion();
if(question == null) {
question = Question.fromURL(url, session.getAdapterConfig().getConfigId(), formattedRemoteId, localID,
session.getDdrRecordId(), session.getKey());
}
session.setQuestion(question);
if (session.getQuestion() != null) {
//create ddr record
DDRRecord ddrRecord = null;
try {
if (direction.contains("outbound")) {
ddrRecord = DDRUtils.createDDRRecordOnOutgoingCommunication(config, formattedRemoteId, 1, url);
}
else {
ddrRecord = DDRUtils.createDDRRecordOnIncomingCommunication(config, formattedRemoteId, 1, url);
}
session.setDdrRecordId( ddrRecord != null ? ddrRecord.getId() : null);
ddrRecord.addAdditionalInfo(Session.TRACKING_TOKEN_KEY, session.getTrackingToken());
}
catch (Exception e) {
String errorMessage = String.format("Creating DDR records failed. Direction: %s for adapterId: %s with address: %s remoteId: %s and localId: %s",
direction, config.getConfigId(), config.getMyAddress(), formattedRemoteId,
localID);
log.severe(errorMessage);
dialogLog.severe(config.getConfigId(), errorMessage, ddrRecord != null ? ddrRecord.getId() : null,
sessionKey);
}
finally {
                if (ddrRecord != null) {
                    ddrRecord.createOrUpdate();
                }
session.storeSession();
}
return handleQuestion( question, config, formattedRemoteId, sessionKey );
}
else {
return Response.ok().build();
}
}
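    /**
     * Callback for user input: DTMF digits, a voicemail recording url, or the
     * outcome of a referral Dial. Feeds the input to the question model,
     * stores it on the ddr record and renders the next question.
     */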
@Path("answer")
@GET
@Produces("application/xml")
public Response answer(@QueryParam("answerId") String answer_id, @QueryParam("Digits") String answer_input,
@QueryParam("From") String localID, @QueryParam("To") String remoteID,
@QueryParam("Direction") String direction, @QueryParam("RecordingUrl") String recordingUrl,
@QueryParam("DialCallStatus") String dialCallStatus, @QueryParam("DialCallSid") String dialCallSid) {
TwiMLResponse twiml = new TwiMLResponse();
try {
answer_input = answer_input != null ? URLDecoder.decode(answer_input, "UTF-8") : answer_input;
}
catch (UnsupportedEncodingException e) {
log.warning(String.format("Answer input decode failed for: %s", answer_input));
}
if(recordingUrl!=null) {
answer_input= recordingUrl.replace(".wav", "") + ".wav";
}
if (direction.equals("inbound")) {
String tmpLocalId = new String(localID);
localID = new String(remoteID);
remoteID = tmpLocalId;
}
String sessionKey = AdapterAgent.ADAPTER_TYPE_TWILIO + "|" + localID + "|" + remoteID;
Session session = Session.getSession(sessionKey);
List<String> callIgnored = Arrays.asList("no-answer", "busy", "canceled", "failed");
// Remove the referralSession
if ("completed".equals(dialCallStatus)) {
AdapterConfig config = session.getAdapterConfig();
finalizeCall(config, null, dialCallSid, null);
}
//if call is rejected. call the hangup event
else if (callIgnored.contains(dialCallStatus) && session != null && session.getQuestion() != null) {
Map<String, String> extras = session.getExtras();
extras.put("requester", session.getLocalAddress());
Question noAnswerQuestion = session.getQuestion().event("hangup", "Call rejected", extras, remoteID);
AdapterConfig config = session.getAdapterConfig();
finalizeCall(config, null, dialCallSid, null);
return handleQuestion(noAnswerQuestion, session.getAdapterConfig(), remoteID, sessionKey);
}
if (session != null) {
Question question = session.getQuestion();
if (question != null) {
String responder = session.getRemoteAddress();
if (session.killed) {
log.warning("session is killed");
return Response.status(Response.Status.BAD_REQUEST).build();
}
if (question.getType() != null && !question.getType().equalsIgnoreCase("comment")) {
dialogLog.log(LogLevel.INFO,
session.getAdapterConfig(),
String.format("Answer input: %s from: %s to question: %s", answer_input,
session.getRemoteAddress(), question.getQuestion_expandedtext()),
session);
}
String answerForQuestion = question.getQuestion_expandedtext();
question = question.answer(responder, session.getAdapterConfig().getConfigId(), answer_id,
answer_input, sessionKey);
//reload the session
session = Session.getSession(sessionKey);
session.setQuestion(question);
session.storeSession();
//check if ddr is in session. save the answer in the ddr
if (session.getDdrRecordId() != null) {
try {
DDRRecord ddrRecord = DDRRecord.getDDRRecord(session.getDdrRecordId(), session.getAccountId());
if (ddrRecord != null) {
ddrRecord.addAdditionalInfo(DDRRecord.ANSWER_INPUT_KEY + ":" + answerForQuestion,
answer_input);
ddrRecord.createOrUpdateWithLog();
}
}
catch (Exception e) {
e.printStackTrace();
}
}
return handleQuestion(question, session.getAdapterConfig(), responder, sessionKey);
}
else {
log.warning("No question found in session!");
}
}
else {
log.warning("No session found for: " + sessionKey);
dialogLog.severe(null, "No session found!", session);
}
String reply = twiml.toXML();
return Response.ok(reply).build();
}
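    /**
     * Callback fired when no input was received in time: triggers the
     * question's timeout event and lets it retry until the retry limit is
     * reached, after which the question is dropped.
     */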
@Path("timeout")
@GET
@Produces("application/xml")
public Response timeout(@QueryParam("From") String localID, @QueryParam("To") String remoteID,
@QueryParam("Direction") String direction) throws Exception {
        //swap local and remote ids if it's an incoming call
if (direction.equals("inbound")) {
String tmpLocalId = new String(localID);
localID = new String(remoteID);
remoteID = tmpLocalId;
}
String sessionKey = AdapterAgent.ADAPTER_TYPE_TWILIO+"|"+localID+"|"+ remoteID;
Session session = Session.getSession(sessionKey);
if (session != null) {
Question question = session.getQuestion();
String responder = session.getRemoteAddress();
if (session.killed) {
return Response.status(Response.Status.BAD_REQUEST).build();
}
            if (question != null) {
                dialogLog.log(LogLevel.INFO,
                              session.getAdapterConfig(),
                              String.format("Timeout from: %s for question: %s", responder,
                                            question.getQuestion_expandedtext()), session);
                HashMap<String, Object> extras = new HashMap<String, Object>();
                extras.put("sessionKey", sessionKey);
                extras.put("requester", session.getLocalAddress());
                question = question.event("timeout", "No answer received", extras, responder);
                session.setQuestion(question);
            }
if (question != null) {
String retryLimit = question.getMediaPropertyValue(MediumType.BROADSOFT, MediaPropertyKey.RETRY_LIMIT);
retryLimit = retryLimit != null ? retryLimit : String.valueOf(Question.DEFAULT_MAX_QUESTION_LOAD);
Integer retryCount = session.getRetryCount();
retryCount = retryCount != null ? retryCount : 0;
if (retryCount < Integer.parseInt(retryLimit)) {
session.setRetryCount(++retryCount);
}
else {
//hangup so set question to null
question = null;
}
}
else {
log.warning("No question found for this session :" + sessionKey);
}
session.storeSession();
return handleQuestion(question, session.getAdapterConfig(), responder, sessionKey);
}
else {
log.warning("Strange that no session is found for: " + sessionKey);
}
TwiMLResponse twiml = new TwiMLResponse();
String reply = twiml.toXML();
return Response.ok(reply).build();
}
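    /**
     * Callback fired just before a referral call is connected; triggers the
     * question's preconnect event.
     */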
@Path("preconnect")
@GET
@Produces("application/voicexml+xml")
public Response preconnect(@QueryParam("From") String localID, @QueryParam("To") String remoteID,
@QueryParam("Direction") String direction) {
String sessionKey = AdapterAgent.ADAPTER_TYPE_TWILIO + "|" + localID + "|" + remoteID;
String reply = (new TwiMLResponse()).toXML();
Session session = Session.getSession(sessionKey);
if (session != null && session.getQuestion() != null) {
Question question = session.getQuestion();
String responder = session.getRemoteAddress();
if (session.killed) {
return Response.status(Response.Status.BAD_REQUEST).build();
}
dialogLog.log(LogLevel.INFO,
session.getAdapterConfig(),
String.format("Wrong answer received from: %s for question: %s", responder,
question.getQuestion_expandedtext()), session);
HashMap<String, String> extras = new HashMap<String, String>();
extras.put("sessionKey", sessionKey);
extras.put("requester", session.getLocalAddress());
question = question.event("preconnect", "preconnect event", extras, responder);
//reload the session
session = Session.getSession(sessionKey);
session.setQuestion(question);
session.storeSession();
return handleQuestion(question, session.getAdapterConfig(), responder, sessionKey);
}
return Response.ok(reply).build();
}
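    /**
     * Status callback from Twilio; on a completed call the session is
     * finalized and the ddr record closed.
     */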
@Path("cc")
@GET
public Response receiveCCMessage(@QueryParam( "CallSid" ) String callSid,
@QueryParam( "From" ) String localID,
@QueryParam( "To" ) String remoteID,
@QueryParam( "Direction" ) String direction,
@QueryParam( "CallStatus" ) String status) {
if(direction.equals("outbound-api")) {
direction = "outbound";
}
if(direction.equals("inbound")) {
String tmpLocalId = localID;
localID = remoteID;
remoteID = tmpLocalId;
}
AdapterConfig config = AdapterConfig.findAdapterConfig(AdapterAgent.ADAPTER_TYPE_TWILIO, localID);
String sessionKey = AdapterAgent.ADAPTER_TYPE_TWILIO + "|" + config.getMyAddress() +
"|" + remoteID;
Session session = Session.getSession(sessionKey);
if (session != null) {
//update session with call timings
if (status.equals("completed")) {
finalizeCall(config, session, callSid, remoteID);
}
}
log.info("Session key: " + sessionKey);
return Response.ok("").build();
}
    /**
     * Retrieves the final call details from Twilio and with that:
     * - updates the ddr record
     * - destroys the session
     * - sends a hangup
     * @param config
     * @param session
     * @param callSid
     * @param remoteID
     */
private void finalizeCall(AdapterConfig config, Session session, String callSid, String remoteID) {
String accountSid = config.getAccessToken();
String authToken = config.getAccessTokenSecret();
TwilioRestClient client = new TwilioRestClient(accountSid, authToken);
Call call = client.getAccount().getCall(callSid);
if(session==null) {
String localAddress = call.getFrom();
remoteID = call.getTo();
String sessionKey = AdapterAgent.ADAPTER_TYPE_TWILIO + "|" + localAddress + "|" + remoteID;
session = Session.getSession(sessionKey);
}
if(session!=null) {
log.info("Finalizing call for: "+session.getKey());
String pattern = "EEE, dd MMM yyyy HH:mm:ss Z";
SimpleDateFormat format = new SimpleDateFormat(pattern, Locale.ENGLISH);
String direction = call.getDirection() != null && call.getDirection().equalsIgnoreCase("outbound-dial") ? "outbound"
: "inbound";
Long startTime = 0L;
Long answerTime = 0L;
Long endTime = 0L;
try {
String created = call.getProperty("date_created");
startTime = format.parse(created).getTime();
endTime = format.parse(call.getEndTime()).getTime();
answerTime = call.getDuration().equals("0") ? endTime : format.parse(call.getStartTime()).getTime();
//sometimes answerTimeStamp is only given in the ACTIVE ccxml
session.setAnswerTimestamp(answerTime.toString());
session.setStartTimestamp(startTime+"");
session.setReleaseTimestamp(endTime+"");
session.setDirection(direction);
session.setRemoteAddress(remoteID);
session.setLocalAddress(config.getMyAddress());
session.storeSession();
//flush the keys if ddrProcessing was successful
if (DDRUtils.stopDDRCosts(session.getKey(), true)) {
session.drop();
}
hangup(session);
} catch (Exception e) {
e.printStackTrace();
}
} else {
log.warning("Failed to finalize call because no session was found for: " + callSid);
}
}
/**
     * Hangs up a call based on the session.
     *
     * @param session if null, doesn't trigger a hangup event. Also expects a question to be present in this session, or at least a
     *            startURL from which the question can be fetched.
* @return
* @throws Exception
*/
public Response hangup(Session session) throws Exception {
if (session != null) {
log.info("call hangup with:" + session.getDirection() + ":" + session.getRemoteAddress() + ":" +
session.getLocalAddress());
if (session.getQuestion() == null) {
Question question = Question.fromURL(session.getStartUrl(), session.getAdapterConfig().getConfigId(),
session.getRemoteAddress(), session.getLocalAddress(),
session.getDdrRecordId(), session.getKey());
session.setQuestion(question);
}
if (session.getQuestion() != null && !isEventTriggered("hangup", session)) {
HashMap<String, Object> timeMap = getTimeMap(session.getStartTimestamp(), session.getAnswerTimestamp(),
session.getReleaseTimestamp());
timeMap.put("referredCalledId", session.getExtras().get("referredCalledId"));
timeMap.put("sessionKey", session.getKey());
if(session.getExtras() != null && !session.getExtras().isEmpty()) {
timeMap.putAll(session.getExtras());
}
Response hangupResponse = handleQuestion(null, session.getAdapterConfig(), session.getRemoteAddress(),
session.getKey());
timeMap.put("requester", session.getLocalAddress());
session.getQuestion().event("hangup", "Hangup", timeMap, session.getRemoteAddress());
dialogLog.log(LogLevel.INFO, session.getAdapterConfig(),
String.format("Call hungup from: %s", session.getRemoteAddress()), session);
return hangupResponse;
}
else {
log.info("no question received");
}
}
return Response.ok("").build();
}
/**
* @param startTime
* @param answerTime
* @param releaseTime
* @return
*/
private HashMap<String, Object> getTimeMap( String startTime, String answerTime, String releaseTime )
{
HashMap<String, Object> timeMap = new HashMap<String, Object>();
timeMap.put( "startTime", startTime );
timeMap.put( "answerTime", answerTime );
timeMap.put( "releaseTime", releaseTime );
return timeMap;
}
/**
     * Checks whether the given event has already been triggered for this session, and marks it as triggered if not.
* @param eventName
* @param session
* @return
*/
private static boolean isEventTriggered(String eventName, Session session) {
if (session != null) {
if (session.getExtras().get("event_" + eventName) != null) {
String timestamp = TimeUtils.getStringFormatFromDateTime(Long.parseLong(session.getExtras()
.get("event_" + eventName)), null);
log.warning(eventName + "event already triggered before for this session at: " + timestamp);
return true;
}
else {
session.getExtras().put("event_" + eventName, String.valueOf(TimeUtils.getServerCurrentTimeInMillis()));
session.storeSession();
}
}
return false;
}
public class Return {
ArrayList<String> prompts;
Question question;
public Return(ArrayList<String> prompts, Question question) {
this.prompts = prompts;
this.question = question;
}
}
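    /**
     * Flattens a question into the prompts to play plus the question to act
     * on, following non-tel referral questions up to LOOP_DETECTION levels
     * deep.
     */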
public Return formQuestion(Question question, String adapterID, String address, String ddrRecordId,
String sessionKey) {
ArrayList<String> prompts = new ArrayList<String>();
for (int count = 0; count <= LOOP_DETECTION; count++) {
if (question == null)
break;
log.info("Going to form question of type: " + question.getType());
if (question.getType() == null) {
question = null;
break;
}
String preferred_language = question.getPreferred_language();
question.setPreferred_language(preferred_language);
String qText = question.getQuestion_text();
if (qText != null && !qText.equals("")) {
prompts.add(qText);
}
if (question.getType().equalsIgnoreCase("closed")) {
for (Answer ans : question.getAnswers()) {
String answer = ans.getAnswer_text();
if (answer != null && !answer.equals("") && !answer.startsWith("dtmfKey://")) {
prompts.add(answer);
}
}
break; //Jump from forloop
}
else if (question.getType().equalsIgnoreCase("comment")) {
//question = question.answer(null, adapterID, null, null);
break;
}
else if (question.getType().equalsIgnoreCase("referral")) {
if (question.getUrl() != null && !question.getUrl().startsWith("tel:")) {
question = Question.fromURL(question.getUrl(), adapterID, address, ddrRecordId, sessionKey);
//question = question.answer(null, null, null);
// break;
}
else {
// Break out because we are going to reconnect
break;
}
}
else {
break; //Jump from forloop (open questions, etc.)
}
}
return new Return(prompts, question);
}
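    // Renders a comment: play the prompts, then redirect to the answer url to continue the dialog.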
protected String renderComment(Question question, ArrayList<String> prompts, String sessionKey) {
TwiMLResponse twiml = new TwiMLResponse();
try {
addPrompts(prompts, question.getPreferred_language(), twiml);
Redirect redirect = new Redirect(getAnswerUrl());
redirect.setMethod("GET");
twiml.append(redirect);
}
catch (TwiMLException e) {
e.printStackTrace();
}
return twiml.toXML();
}
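    // Renders a referral as a TwiML Dial to the target number, optionally routed via the preconnect callback.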
protected String renderReferral(Question question,ArrayList<String> prompts, String sessionKey, String remoteID){
TwiMLResponse twiml = new TwiMLResponse();
try {
addPrompts(prompts, question.getPreferred_language(), twiml);
String redirectTimeoutProperty = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.TIMEOUT );
String redirectTimeout = redirectTimeoutProperty != null ? redirectTimeoutProperty.replace("s", "") : "30";
int timeout = 30;
try {
timeout = Integer.parseInt(redirectTimeout);
}
catch (NumberFormatException e) {
e.printStackTrace();
}
com.twilio.sdk.verbs.Number number = new com.twilio.sdk.verbs.Number(question.getUrl());
String usePreconnect = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.USE_PRECONNECT );
usePreconnect = usePreconnect != null ? usePreconnect : "false";
boolean preconnect = Boolean.parseBoolean(usePreconnect);
if(preconnect) {
number.setMethod("GET");
number.setUrl(getPreconnectUrl());
}
Dial dial = new Dial();
dial.setCallerId(remoteID);
dial.append(number);
dial.setTimeout(timeout);
dial.setMethod("GET");
dial.setAction(getAnswerUrl());
twiml.append(dial);
}catch(TwiMLException e ) {
log.warning("Failed to create referal");
}
return twiml.toXML();
}
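    // Renders a closed question as a TwiML Gather for a single DTMF digit,
    // with a redirect to the timeout url when no input arrives.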
protected String renderClosedQuestion(Question question, ArrayList<String> prompts, String sessionKey) {
try {
sessionKey = URLEncoder.encode(sessionKey, "UTF-8");
}
catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
TwiMLResponse twiml = new TwiMLResponse();
Gather gather = new Gather();
gather.setAction(getAnswerUrl());
gather.setMethod("GET");
gather.setNumDigits(1);
String noAnswerTimeout = question.getMediaPropertyValue(MediumType.BROADSOFT, MediaPropertyKey.TIMEOUT);
boolean useHash = true;
if(question.getAnswers().size() > 11) {
useHash = false;
}
else {
List<Answer> answers = question.getAnswers();
for (Answer answer : answers) {
if (answer != null && answer.getAnswer_text() != null &&
answer.getAnswer_text().startsWith("dtmfKey://#")) {
useHash = true;
break;
}
}
}
//assign a default timeout if one is not specified
noAnswerTimeout = noAnswerTimeout != null ? noAnswerTimeout : "5";
if (noAnswerTimeout.endsWith("s")) {
log.warning("No answer timeout must end with 's'. E.g. 10s. Found: " + noAnswerTimeout);
noAnswerTimeout = noAnswerTimeout.replace("s", "");
}
int timeout = 5;
try {
timeout = Integer.parseInt(noAnswerTimeout);
}
catch (NumberFormatException e) {
e.printStackTrace();
}
gather.setTimeout(timeout);
if(useHash) {
gather.setFinishOnKey("");
}
try {
addPrompts(prompts, question.getPreferred_language(), gather);
twiml.append(gather);
Redirect redirect = new Redirect(getTimeoutUrl());
redirect.setMethod("GET");
twiml.append(redirect);
}
catch (TwiMLException e) {
e.printStackTrace();
}
return twiml.toXML();
}
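    // Renders an open question: a voicemail Record when the answer type is
    // audio, otherwise a DTMF Gather.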
protected String renderOpenQuestion(Question question,ArrayList<String> prompts,String sessionKey) {
TwiMLResponse twiml = new TwiMLResponse();
String typeProperty = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.TYPE );
if(typeProperty!=null && typeProperty.equalsIgnoreCase("audio")) {
renderVoiceMailQuestion(question, prompts, sessionKey, twiml);
} else {
Gather gather = new Gather();
gather.setAction(getAnswerUrl());
gather.setMethod("GET");
String dtmfMaxLength = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.ANSWER_INPUT_MAX_LENGTH );
if(dtmfMaxLength!=null) {
try {
int digits = Integer.parseInt(dtmfMaxLength);
gather.setNumDigits(digits);
}
catch (NumberFormatException e) {
e.printStackTrace();
}
}
String noAnswerTimeout = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.TIMEOUT );
//assign a default timeout if one is not specified
noAnswerTimeout = noAnswerTimeout != null ? noAnswerTimeout : "5";
if (noAnswerTimeout.endsWith("s")) {
log.warning("No answer timeout must end with 's'. E.g. 10s. Found: " + noAnswerTimeout);
noAnswerTimeout = noAnswerTimeout.replace("s", "");
}
int timeout = 5;
try {
timeout = Integer.parseInt(noAnswerTimeout);
}
catch (NumberFormatException e) {
e.printStackTrace();
}
gather.setTimeout(timeout);
try {
addPrompts(prompts, question.getPreferred_language(), gather);
twiml.append(gather);
Redirect redirect = new Redirect(getTimeoutUrl());
redirect.setMethod("GET");
twiml.append(redirect);
}
catch (TwiMLException e) {
e.printStackTrace();
}
}
return twiml.toXML();
}
    /** renders/updates the xml for recording audio and posts it to the user on the callback
     * @param question
     * @param prompts
     * @param sessionKey
     * @param twiml
     */
protected void renderVoiceMailQuestion(Question question, ArrayList<String> prompts, String sessionKey,
TwiMLResponse twiml) {
addPrompts(prompts, question.getPreferred_language(), twiml);
Record record = new Record();
record.setAction(getAnswerUrl());
record.setMethod("GET");
// Set max voicemail length
//assign a default voice mail length if one is not specified
String voiceMessageLengthProperty = question.getMediaPropertyValue(MediumType.BROADSOFT,
MediaPropertyKey.VOICE_MESSAGE_LENGTH);
voiceMessageLengthProperty = voiceMessageLengthProperty != null ? voiceMessageLengthProperty : "15";
int length = 15;
try {
length = Integer.parseInt(voiceMessageLengthProperty);
}
catch (NumberFormatException e) {
log.warning("Failed to parse timeout for voicemail e: "+e.getMessage());
}
record.setMaxLength(length);
// Set timeout
String timeoutProperty = question.getMediaPropertyValue(MediumType.BROADSOFT,
MediaPropertyKey.TIMEOUT);
timeoutProperty = timeoutProperty != null ? timeoutProperty : "5";
int timeout = 5;
try {
timeout = Integer.parseInt(timeoutProperty);
}
catch (NumberFormatException e) {
log.warning("Failed to parse timeout for voicemail e: "+e.getMessage());
}
record.setTimeout(timeout);
// Set voicemail beep
String voiceMailBeep = question.getMediaPropertyValue(MediumType.BROADSOFT, MediaPropertyKey.VOICE_MESSAGE_BEEP);
voiceMailBeep = voiceMailBeep != null ? voiceMailBeep : "true";
boolean beep = Boolean.parseBoolean(voiceMailBeep);
record.setPlayBeep(beep);
try {
twiml.append(record);
Redirect redirect = new Redirect(getTimeoutUrl());
redirect.setMethod("GET");
twiml.append(redirect);
} catch (TwiMLException e) {
log.warning("Failed to append record");
}
}
protected String renderExitQuestion(Question question, ArrayList<String> prompts, String sessionKey) {
TwiMLResponse twiml = new TwiMLResponse();
addPrompts(prompts, question.getPreferred_language(), twiml);
try {
twiml.append(new Hangup());
} catch (TwiMLException e) {
log.warning("Failed to append hangup");
}
return twiml.toXML();
}
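    // Appends prompts as TwiML verbs: http urls become Play, everything else
    // Say (with any "text://" prefix stripped) in the preferred language.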
protected void addPrompts(ArrayList<String> prompts, String language, Verb twiml) {
String lang = language.contains("-") ? language : "nl-NL";
try {
for (String prompt : prompts) {
if (prompt.startsWith("http")) {
twiml.append(new Play(prompt));
}
else {
Say say = new Say(prompt.replace("text://", ""));
say.setLanguage(lang);
twiml.append(say);
}
}
}
catch (TwiMLException e) {
log.warning("failed to added prompts: " + e.getMessage());
}
}
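    /**
     * Central dispatcher: renders the given question (closed, open, referral,
     * exit or comment) as TwiML, keeping the session and, for referrals, the
     * ddr records in sync.
     */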
private Response handleQuestion(Question question, AdapterConfig adapterConfig, String remoteID, String sessionKey) {
String result = (new TwiMLResponse()).toXML();
Return res = formQuestion(question, adapterConfig.getConfigId(),remoteID, null, sessionKey);
if (question != null && !question.getType().equalsIgnoreCase("comment"))
question = res.question;
Session session = Session.getSession(sessionKey);
        // if the adapter is a trial adapter, add an introductory node
log.info("question formed at handleQuestion is: "+ ServerUtils.serializeWithoutException(question));
log.info("prompts formed at handleQuestion is: " + res.prompts);
if (question != null) {
question.generateIds();
session.setQuestion(question);
session.setRemoteAddress(remoteID);
session.storeSession();
if (question.getType().equalsIgnoreCase("closed")) {
result = renderClosedQuestion(question, res.prompts, sessionKey);
} else if (question.getType().equalsIgnoreCase("open")) {
result = renderOpenQuestion(question, res.prompts, sessionKey);
} else if (question.getType().equalsIgnoreCase("referral")) {
if (question.getUrl() != null
&& question.getUrl().startsWith("tel:")) {
                    // added for release 0.4.2 to store the question in the session,
                    // for triggering an answered event.
                    // Check which remoteID we are going to use for the call
String externalCallerId = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.USE_EXTERNAL_CALLERID );
Boolean callerId = false;
if(externalCallerId!=null) {
callerId = Boolean.parseBoolean(externalCallerId);
}
if(!callerId) {
remoteID = adapterConfig.getMyAddress();
}
log.info(String.format("current session key before referral is: %s and remoteId %s", sessionKey, remoteID));
String redirectedId = PhoneNumberUtils.formatNumber(question.getUrl().replace("tel:", ""), null);
if (redirectedId != null) {
                    // update url with formatted redirectedId. RFC3966
// returns format tel:<blabla> as expected
question.setUrl(redirectedId);
                    // store the remoteId as it's lost while trying to
// trigger the answered event
HashMap<String, String> extras = new HashMap<String, String>();
extras.put("referredCalledId", redirectedId);
session.getExtras().putAll(extras);
session.setQuestion(question);
session.setRemoteAddress(remoteID);
                    // create a new ddr record and session to catch the redirect
Session referralSession = Session.getOrCreateSession(adapterConfig, remoteID, redirectedId);
if (session.getDirection() != null) {
DDRRecord ddrRecord = null;
try {
ddrRecord = DDRUtils.createDDRRecordOnOutgoingCommunication(adapterConfig, redirectedId, 1,question.getUrl());
if (ddrRecord != null) {
ddrRecord.addAdditionalInfo(Session.TRACKING_TOKEN_KEY,session.getTrackingToken());
ddrRecord.createOrUpdate();
referralSession.setDdrRecordId(ddrRecord.getId());
}
} catch (Exception e) {
e.printStackTrace();
log.severe(String.format("Continuing without DDR. Error: %s",e.toString()));
}
referralSession.setDirection(session.getDirection());
referralSession.setTrackingToken(session.getTrackingToken());
}
referralSession.setQuestion(session.getQuestion());
referralSession.getExtras().put("referralSessionKey", session.getKey());
referralSession.storeSession();
session.storeSession();
} else {
log.severe(String.format("Redirect address is invalid: %s. Ignoring.. ",question.getUrl().replace("tel:", "")));
}
result = renderReferral(question, res.prompts, sessionKey, remoteID);
}
} else if (question.getType().equalsIgnoreCase("exit")) {
result = renderExitQuestion(question, res.prompts, sessionKey);
} else if (res.prompts.size() > 0) {
result = renderComment(question, res.prompts, sessionKey);
}
} else if (res.prompts.size() > 0) {
result = renderComment(null, res.prompts, sessionKey);
} else {
log.info("Going to hangup? So clear Session?");
}
log.info("Sending xml: " + result);
return Response.status(Status.OK).type(MediaType.APPLICATION_XML).entity(result).build();
}
protected String getAnswerUrl() {
return "http://"+Settings.HOST+"/dialoghandler/rest/twilio/answer";
}
protected String getTimeoutUrl() {
return "http://"+Settings.HOST+"/dialoghandler/rest/twilio/timeout";
}
protected String getPreconnectUrl() {
return "http://"+Settings.HOST+"/dialoghandler/rest/twilio/preconnect";
}
}
| dialoghandler/src/main/java/com/almende/dialog/adapter/TwilioAdapter.java | package com.almende.dialog.adapter;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.logging.Logger;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import com.almende.dialog.LogLevel;
import com.almende.dialog.Settings;
import com.almende.dialog.accounts.AdapterConfig;
import com.almende.dialog.agent.AdapterAgent;
import com.almende.dialog.model.Answer;
import com.almende.dialog.model.MediaProperty.MediaPropertyKey;
import com.almende.dialog.model.MediaProperty.MediumType;
import com.almende.dialog.model.Question;
import com.almende.dialog.model.Session;
import com.almende.dialog.model.ddr.DDRRecord;
import com.almende.dialog.util.DDRUtils;
import com.almende.dialog.util.ServerUtils;
import com.almende.dialog.util.TimeUtils;
import com.askfast.commons.utils.PhoneNumberUtils;
import com.google.i18n.phonenumbers.PhoneNumberUtil.PhoneNumberFormat;
import com.twilio.sdk.TwilioRestClient;
import com.twilio.sdk.resource.factory.CallFactory;
import com.twilio.sdk.resource.instance.Account;
import com.twilio.sdk.resource.instance.Call;
import com.twilio.sdk.verbs.Dial;
import com.twilio.sdk.verbs.Gather;
import com.twilio.sdk.verbs.Hangup;
import com.twilio.sdk.verbs.Play;
import com.twilio.sdk.verbs.Record;
import com.twilio.sdk.verbs.Redirect;
import com.twilio.sdk.verbs.Say;
import com.twilio.sdk.verbs.TwiMLException;
import com.twilio.sdk.verbs.TwiMLResponse;
import com.twilio.sdk.verbs.Verb;
@Path("twilio")
public class TwilioAdapter {
protected static final Logger log = Logger.getLogger(VoiceXMLRESTProxy.class.getName());
protected static final com.almende.dialog.Logger dialogLog = new com.almende.dialog.Logger();
private static final int LOOP_DETECTION=10;
protected String TIMEOUT_URL="timeout";
//protected String EXCEPTION_URL="exception";
public static HashMap<String, String> dial(Map<String, String> addressNameMap, String url, String senderName,
AdapterConfig config, String applicationId) throws Exception {
HashMap<String, String> resultSessionMap = new HashMap<String, String>();
// If it is a broadcast don't provide the remote address because it is deceiving.
String loadAddress = "";
if (addressNameMap.size() == 1)
loadAddress = addressNameMap.keySet().iterator().next();
//fetch the question
Question question = Question.fromURL(url, config.getConfigId(), loadAddress, config.getMyAddress(), null, null);
for (String address : addressNameMap.keySet()) {
String formattedAddress = PhoneNumberUtils.formatNumber(address, PhoneNumberFormat.E164);
if (formattedAddress != null) {
//avoid multiple calls to be made to the same number, from the same adapter.
Session session = Session.getSession(Session.getSessionKey(config, formattedAddress));
if (session != null) {
// recreate a fresh session
session.drop();
session = Session.getOrCreateSession(config, formattedAddress);
}
else {
session = Session.getOrCreateSession(config, formattedAddress);
}
session.killed = false;
session.setStartUrl(url);
session.setDirection("outbound");
session.setRemoteAddress(formattedAddress);
session.setType(AdapterAgent.ADAPTER_TYPE_TWILIO);
session.setAdapterID(config.getConfigId());
session.setQuestion(question);
dialogLog.log(LogLevel.INFO, session.getAdapterConfig(), String.format("Outgoing call requested from: %s to: %s",
session.getLocalAddress(),
formattedAddress), session);
String extSession = "";
if (!ServerUtils.isInUnitTestingEnvironment()) {
String accountSid = config.getAccessToken();
String authToken = config.getAccessTokenSecret();
TwilioRestClient twilio = new TwilioRestClient(accountSid, authToken);
// Get the main account (The one we used to authenticate the client)
Account mainAccount = twilio.getAccount();
// Make a call
CallFactory callFactory = mainAccount.getCallFactory();
Map<String, String> callParams = new HashMap<String, String>();
callParams.put("To", formattedAddress); // Replace with a valid phone number
callParams.put("From", config.getMyAddress()); // Replace with a valid phone
// number in your account
callParams.put("ApplicationSid", applicationId);
//callParams.put("Url", "http://" + Settings.HOST + "/dialoghandler/rest/twilio/new");
callParams.put("StatusCallback", "http://" + Settings.HOST + "/dialoghandler/rest/twilio/cc");
callParams.put("StatusCallbackMethod", "GET");
callParams.put("IfMachine", "Hangup");
callParams.put("Record", "false");
Call call = callFactory.create(callParams);
                log.info("Twilio call created with sid: " + call.getSid());
extSession = call.getSid();
}
session.setExternalSession(extSession);
session.storeSession();
resultSessionMap.put(formattedAddress, session.getKey());
}
else {
resultSessionMap.put(address, "Invalid address");
log.severe(String.format("To address is invalid: %s. Ignoring.. ", address));
}
}
return resultSessionMap;
}
@Path("new")
@GET
@Produces("application/xml")
public Response getNewDialog(@QueryParam("CallSid") String CallSid,
@QueryParam("AccountSid") String AccountSid,
@QueryParam("From") String localID,
@QueryParam("To") String remoteID,
@QueryParam("Direction") String direction) {
log.info("call started:"+direction+":"+remoteID+":"+localID);
if (direction.equals("inbound")) {
String tmpLocalId = new String(localID);
localID = new String(remoteID);
remoteID = tmpLocalId;
}
AdapterConfig config = AdapterConfig.findAdapterConfig(AdapterAgent.ADAPTER_TYPE_TWILIO, localID);
String formattedRemoteId = PhoneNumberUtils.formatNumber(remoteID, null);
String sessionKey = AdapterAgent.ADAPTER_TYPE_TWILIO+"|"+localID+"|"+ formattedRemoteId;
Session session = Session.getSession(sessionKey);
String url = "";
if ( session != null && direction.startsWith("outbound")) {
url = session.getStartUrl();
dialogLog.log(LogLevel.INFO, config, String
.format("Trying to fetch dialog for %s, due to outgoing Call from: %s ",
formattedRemoteId, config.getMyAddress()), session);
}
else if(direction.equals("inbound")) {
//create a session for incoming only
session = Session.getSession(Session.getSessionKey(config, formattedRemoteId));
if(session != null) {
session.drop();
}
            session = Session.getOrCreateSession(config, formattedRemoteId);
            url = config.getURLForInboundScenario();
}
if(session != null) {
session.setStartUrl( url );
session.setDirection( direction );
session.setRemoteAddress( formattedRemoteId );
session.setType( AdapterAgent.ADAPTER_TYPE_TWILIO );
session.setAccountId( config.getOwner() );
session.setAdapterID( config.getConfigId() );
}
else {
log.severe(String.format("Session %s not found", sessionKey));
return null;
}
Question question = session.getQuestion();
if(question == null) {
question = Question.fromURL(url, session.getAdapterConfig().getConfigId(), formattedRemoteId, localID,
session.getDdrRecordId(), session.getKey());
}
session.setQuestion(question);
if (session.getQuestion() != null) {
//create ddr record
DDRRecord ddrRecord = null;
try {
if (direction.contains("outbound")) {
ddrRecord = DDRUtils.createDDRRecordOnOutgoingCommunication(config, formattedRemoteId, 1, url);
}
else {
ddrRecord = DDRUtils.createDDRRecordOnIncomingCommunication(config, formattedRemoteId, 1, url);
}
session.setDdrRecordId( ddrRecord != null ? ddrRecord.getId() : null);
ddrRecord.addAdditionalInfo(Session.TRACKING_TOKEN_KEY, session.getTrackingToken());
}
catch (Exception e) {
String errorMessage = String.format("Creating DDR records failed. Direction: %s for adapterId: %s with address: %s remoteId: %s and localId: %s",
direction, config.getConfigId(), config.getMyAddress(), formattedRemoteId,
localID);
log.severe(errorMessage);
dialogLog.severe(config.getConfigId(), errorMessage, ddrRecord != null ? ddrRecord.getId() : null,
sessionKey);
}
finally {
                if (ddrRecord != null) {
                    ddrRecord.createOrUpdate();
                }
session.storeSession();
}
return handleQuestion( question, config, formattedRemoteId, sessionKey );
}
else {
return Response.ok().build();
}
}
@Path("new")
@POST
@Produces("application/xml")
public Response getNewDialogPost(@FormParam("CallSid") String CallSid,
@FormParam("AccountSid") String AccountSid,
@FormParam("From") String localID,
@FormParam("To") String remoteID,
@FormParam("Direction") String direction) {
log.info("call started:"+direction+":"+remoteID+":"+localID);
        //swap the remote and the local numbers if it's inbound
if(direction.equals("inbound")) {
String tmpLocalId = new String(localID);
localID = new String(remoteID);
remoteID = tmpLocalId;
}
AdapterConfig config = AdapterConfig.findAdapterConfig(AdapterAgent.ADAPTER_TYPE_TWILIO, localID);
String formattedRemoteId = PhoneNumberUtils.formatNumber(remoteID, null);
String sessionKey = AdapterAgent.ADAPTER_TYPE_TWILIO+"|"+localID+"|"+ formattedRemoteId;
Session session = Session.getSession(sessionKey);
String url = "";
if ( session != null && direction.startsWith("outbound")) {
url = session.getStartUrl();
dialogLog.log(LogLevel.INFO, config, String
.format("Trying to fetch dialog for %s, due to outgoing Call from: %s ",
formattedRemoteId, config.getMyAddress()), session);
}
else if(direction.equals("inbound")) {
//create a session for incoming only
session = Session.getSession(Session.getSessionKey(config, formattedRemoteId));
if(session != null) {
session.drop();
}
session = Session.getOrCreateSession(config, formattedRemoteId);
url = config.getURLForInboundScenario();
}
if(session != null) {
session.setStartUrl( url );
session.setDirection( direction );
session.setRemoteAddress( formattedRemoteId );
session.setType( AdapterAgent.ADAPTER_TYPE_TWILIO );
session.setAccountId( config.getOwner() );
session.setAdapterID( config.getConfigId() );
}
else {
log.severe(String.format("Session %s not found", sessionKey));
return null;
}
Question question = session.getQuestion();
if(question == null) {
question = Question.fromURL(url, session.getAdapterConfig().getConfigId(), formattedRemoteId, localID,
session.getDdrRecordId(), session.getKey());
}
session.setQuestion(question);
if (session.getQuestion() != null) {
//create ddr record
DDRRecord ddrRecord = null;
try {
if (direction.contains("outbound")) {
ddrRecord = DDRUtils.createDDRRecordOnOutgoingCommunication(config, formattedRemoteId, 1, url);
}
else {
ddrRecord = DDRUtils.createDDRRecordOnIncomingCommunication(config, formattedRemoteId, 1, url);
}
session.setDdrRecordId( ddrRecord != null ? ddrRecord.getId() : null);
ddrRecord.addAdditionalInfo(Session.TRACKING_TOKEN_KEY, session.getTrackingToken());
}
catch (Exception e) {
String errorMessage = String.format("Creating DDR records failed. Direction: %s for adapterId: %s with address: %s remoteId: %s and localId: %s",
direction, config.getConfigId(), config.getMyAddress(), formattedRemoteId,
localID);
log.severe(errorMessage);
dialogLog.severe(config.getConfigId(), errorMessage, ddrRecord != null ? ddrRecord.getId() : null,
sessionKey);
}
finally {
                if (ddrRecord != null) { ddrRecord.createOrUpdate(); } //guard: ddr record creation may have failed above
session.storeSession();
}
return handleQuestion( question, config, formattedRemoteId, sessionKey );
}
else {
return Response.ok().build();
}
}
@Path("answer")
@GET
@Produces("application/xml")
public Response answer(@QueryParam("answerId") String answer_id, @QueryParam("Digits") String answer_input,
@QueryParam("From") String localID, @QueryParam("To") String remoteID,
@QueryParam("Direction") String direction, @QueryParam("RecordingUrl") String recordingUrl,
@QueryParam("DialCallStatus") String dialCallStatus, @QueryParam("DialCallSid") String dialCallSid) {
TwiMLResponse twiml = new TwiMLResponse();
try {
answer_input = answer_input != null ? URLDecoder.decode(answer_input, "UTF-8") : answer_input;
}
catch (UnsupportedEncodingException e) {
log.warning(String.format("Answer input decode failed for: %s", answer_input));
}
if(recordingUrl!=null) {
answer_input= recordingUrl.replace(".wav", "") + ".wav";
}
if (direction.equals("inbound")) {
            String tmpLocalId = localID;
            localID = remoteID;
            remoteID = tmpLocalId;
}
String sessionKey = AdapterAgent.ADAPTER_TYPE_TWILIO + "|" + localID + "|" + remoteID;
Session session = Session.getSession(sessionKey);
List<String> callIgnored = Arrays.asList("no-answer", "busy", "canceled", "failed");
// Remove the referralSession
if ("completed".equals(dialCallStatus)) {
AdapterConfig config = session.getAdapterConfig();
finalizeCall(config, null, dialCallSid, null);
}
//if call is rejected. call the hangup event
else if (callIgnored.contains(dialCallStatus) && session != null && session.getQuestion() != null) {
Map<String, String> extras = session.getExtras();
extras.put("requester", session.getLocalAddress());
Question noAnswerQuestion = session.getQuestion().event("hangup", "Call rejected", extras, remoteID);
AdapterConfig config = session.getAdapterConfig();
finalizeCall(config, null, dialCallSid, null);
return handleQuestion(noAnswerQuestion, session.getAdapterConfig(), remoteID, sessionKey);
}
if (session != null) {
Question question = session.getQuestion();
if (question != null) {
String responder = session.getRemoteAddress();
if (session.killed) {
log.warning("session is killed");
return Response.status(Response.Status.BAD_REQUEST).build();
}
if (question.getType() != null && !question.getType().equalsIgnoreCase("comment")) {
dialogLog.log(LogLevel.INFO,
session.getAdapterConfig(),
String.format("Answer input: %s from: %s to question: %s", answer_input,
session.getRemoteAddress(), question.getQuestion_expandedtext()),
session);
}
String answerForQuestion = question.getQuestion_expandedtext();
question = question.answer(responder, session.getAdapterConfig().getConfigId(), answer_id,
answer_input, sessionKey);
//reload the session
session = Session.getSession(sessionKey);
session.setQuestion(question);
session.storeSession();
//check if ddr is in session. save the answer in the ddr
if (session.getDdrRecordId() != null) {
try {
DDRRecord ddrRecord = DDRRecord.getDDRRecord(session.getDdrRecordId(), session.getAccountId());
if (ddrRecord != null) {
ddrRecord.addAdditionalInfo(DDRRecord.ANSWER_INPUT_KEY + ":" + answerForQuestion,
answer_input);
ddrRecord.createOrUpdateWithLog();
}
}
catch (Exception e) {
e.printStackTrace();
}
}
return handleQuestion(question, session.getAdapterConfig(), responder, sessionKey);
}
else {
log.warning("No question found in session!");
}
}
else {
log.warning("No session found for: " + sessionKey);
dialogLog.severe(null, "No session found!", session);
}
String reply = twiml.toXML();
return Response.ok(reply).build();
}
@Path("timeout")
@GET
@Produces("application/xml")
public Response timeout(@QueryParam("From") String localID, @QueryParam("To") String remoteID,
@QueryParam("Direction") String direction) throws Exception {
        //swap local and remote ids if it's an incoming call
if (direction.equals("inbound")) {
            String tmpLocalId = localID;
            localID = remoteID;
            remoteID = tmpLocalId;
}
String sessionKey = AdapterAgent.ADAPTER_TYPE_TWILIO+"|"+localID+"|"+ remoteID;
Session session = Session.getSession(sessionKey);
if (session != null) {
Question question = session.getQuestion();
String responder = session.getRemoteAddress();
if (session.killed) {
return Response.status(Response.Status.BAD_REQUEST).build();
}
dialogLog.log(LogLevel.INFO,
session.getAdapterConfig(),
String.format("Timeout from: %s for question: %s", responder,
question.getQuestion_expandedtext()), session);
HashMap<String, Object> extras = new HashMap<String, Object>();
extras.put("sessionKey", sessionKey);
extras.put("requester", session.getLocalAddress());
question = question.event("timeout", "No answer received", extras, responder);
session.setQuestion(question);
if (question != null) {
String retryLimit = question.getMediaPropertyValue(MediumType.BROADSOFT, MediaPropertyKey.RETRY_LIMIT);
retryLimit = retryLimit != null ? retryLimit : String.valueOf(Question.DEFAULT_MAX_QUESTION_LOAD);
Integer retryCount = session.getRetryCount();
retryCount = retryCount != null ? retryCount : 0;
if (retryCount < Integer.parseInt(retryLimit)) {
session.setRetryCount(++retryCount);
}
else {
//hangup so set question to null
question = null;
}
}
else {
log.warning("No question found for this session :" + sessionKey);
}
session.storeSession();
return handleQuestion(question, session.getAdapterConfig(), responder, sessionKey);
}
else {
log.warning("Strange that no session is found for: " + sessionKey);
}
TwiMLResponse twiml = new TwiMLResponse();
String reply = twiml.toXML();
return Response.ok(reply).build();
}
@Path("preconnect")
@GET
@Produces("application/voicexml+xml")
public Response preconnect(@QueryParam("From") String localID, @QueryParam("To") String remoteID,
@QueryParam("Direction") String direction) {
// TODO: test
String sessionKey = AdapterAgent.ADAPTER_TYPE_TWILIO+"|"+localID+"|"+ remoteID;
String reply = (new TwiMLResponse()).toXML();
Session session = Session.getSession(sessionKey);
if (session != null && session.getQuestion() != null) {
Question question = session.getQuestion();
String responder = session.getRemoteAddress();
if (session.killed) {
return Response.status(Response.Status.BAD_REQUEST).build();
}
dialogLog.log(LogLevel.INFO,
session.getAdapterConfig(),
String.format("Wrong answer received from: %s for question: %s", responder,
question.getQuestion_expandedtext()), session);
HashMap<String, String> extras = new HashMap<String, String>();
extras.put("sessionKey", sessionKey);
extras.put("requester", session.getLocalAddress());
question = question.event("preconnect", "Wrong answer received", extras, responder);
//reload the session
session = Session.getSession(sessionKey);
session.setQuestion(question);
session.storeSession();
return handleQuestion(question, session.getAdapterConfig(), responder, sessionKey);
}
return Response.ok(reply).build();
}
@Path("cc")
@GET
public Response receiveCCMessage(@QueryParam( "CallSid" ) String callSid,
@QueryParam( "From" ) String localID,
@QueryParam( "To" ) String remoteID,
@QueryParam( "Direction" ) String direction,
@QueryParam( "CallStatus" ) String status) {
if(direction.equals("outbound-api")) {
direction = "outbound";
}
if(direction.equals("inbound")) {
String tmpLocalId = localID;
localID = remoteID;
remoteID = tmpLocalId;
}
AdapterConfig config = AdapterConfig.findAdapterConfig(AdapterAgent.ADAPTER_TYPE_TWILIO, localID);
String sessionKey = AdapterAgent.ADAPTER_TYPE_TWILIO + "|" + config.getMyAddress() +
"|" + remoteID;
Session session = Session.getSession(sessionKey);
if (session != null) {
//update session with call timings
if (status.equals("completed")) {
finalizeCall(config, session, callSid, remoteID);
}
}
log.info("Session key: " + sessionKey);
return Response.ok("").build();
}
/**
* Retrieve call information and with that:
* - update ddr record
* - destroy session
* - send hangup
* @param config
* @param session
* @param callSid
* @param remoteID
*/
private void finalizeCall(AdapterConfig config, Session session, String callSid, String remoteID) {
String accountSid = config.getAccessToken();
String authToken = config.getAccessTokenSecret();
TwilioRestClient client = new TwilioRestClient(accountSid, authToken);
Call call = client.getAccount().getCall(callSid);
if(session==null) {
String localAddress = call.getFrom();
remoteID = call.getTo();
String sessionKey = AdapterAgent.ADAPTER_TYPE_TWILIO + "|" + localAddress + "|" + remoteID;
session = Session.getSession(sessionKey);
}
if(session!=null) {
log.info("Finalizing call for: "+session.getKey());
String pattern = "EEE, dd MMM yyyy HH:mm:ss Z";
SimpleDateFormat format = new SimpleDateFormat(pattern, Locale.ENGLISH);
String direction = call.getDirection() != null && call.getDirection().equalsIgnoreCase("outbound-dial") ? "outbound"
: "inbound";
Long startTime = 0L;
Long answerTime = 0L;
Long endTime = 0L;
try {
String created = call.getProperty("date_created");
startTime = format.parse(created).getTime();
endTime = format.parse(call.getEndTime()).getTime();
answerTime = call.getDuration().equals("0") ? endTime : format.parse(call.getStartTime()).getTime();
//sometimes answerTimeStamp is only given in the ACTIVE ccxml
session.setAnswerTimestamp(answerTime.toString());
session.setStartTimestamp(startTime+"");
session.setReleaseTimestamp(endTime+"");
session.setDirection(direction);
session.setRemoteAddress(remoteID);
session.setLocalAddress(config.getMyAddress());
session.storeSession();
//flush the keys if ddrProcessing was successful
if (DDRUtils.stopDDRCosts(session.getKey(), true)) {
session.drop();
}
hangup(session);
} catch (Exception e) {
e.printStackTrace();
}
} else {
log.warning("Failed to finalize call because no session was found for: " + callSid);
}
}
/**
* hang up a call based on the session.
*
     * @param session if null, doesn't trigger a hangup event. Also expects a question to be present in this session, or at least a
* startURL from where the question can be fetched.
* @return
* @throws Exception
*/
public Response hangup(Session session) throws Exception {
if (session != null) {
log.info("call hangup with:" + session.getDirection() + ":" + session.getRemoteAddress() + ":" +
session.getLocalAddress());
if (session.getQuestion() == null) {
Question question = Question.fromURL(session.getStartUrl(), session.getAdapterConfig().getConfigId(),
session.getRemoteAddress(), session.getLocalAddress(),
session.getDdrRecordId(), session.getKey());
session.setQuestion(question);
}
if (session.getQuestion() != null && !isEventTriggered("hangup", session)) {
HashMap<String, Object> timeMap = getTimeMap(session.getStartTimestamp(), session.getAnswerTimestamp(),
session.getReleaseTimestamp());
timeMap.put("referredCalledId", session.getExtras().get("referredCalledId"));
timeMap.put("sessionKey", session.getKey());
if(session.getExtras() != null && !session.getExtras().isEmpty()) {
timeMap.putAll(session.getExtras());
}
Response hangupResponse = handleQuestion(null, session.getAdapterConfig(), session.getRemoteAddress(),
session.getKey());
timeMap.put("requester", session.getLocalAddress());
session.getQuestion().event("hangup", "Hangup", timeMap, session.getRemoteAddress());
dialogLog.log(LogLevel.INFO, session.getAdapterConfig(),
String.format("Call hungup from: %s", session.getRemoteAddress()), session);
return hangupResponse;
}
else {
log.info("no question received");
}
}
return Response.ok("").build();
}
/**
* @param startTime
* @param answerTime
* @param releaseTime
* @return
*/
private HashMap<String, Object> getTimeMap( String startTime, String answerTime, String releaseTime )
{
HashMap<String, Object> timeMap = new HashMap<String, Object>();
timeMap.put( "startTime", startTime );
timeMap.put( "answerTime", answerTime );
timeMap.put( "releaseTime", releaseTime );
return timeMap;
}
/**
     * Checks whether an event with the given name has already been triggered for this session.
     * If it has not, the current server timestamp is stored in the session under the key
     * "event_" + eventName, marking the event as triggered.
     * @param eventName the name of the event to check
     * @param session the session to inspect; may be null
     * @return true if the event was already triggered before for this session, false otherwise
*/
private static boolean isEventTriggered(String eventName, Session session) {
if (session != null) {
if (session.getExtras().get("event_" + eventName) != null) {
String timestamp = TimeUtils.getStringFormatFromDateTime(Long.parseLong(session.getExtras()
.get("event_" + eventName)), null);
log.warning(eventName + "event already triggered before for this session at: " + timestamp);
return true;
}
else {
session.getExtras().put("event_" + eventName, String.valueOf(TimeUtils.getServerCurrentTimeInMillis()));
session.storeSession();
}
}
return false;
}
public class Return {
ArrayList<String> prompts;
Question question;
public Return(ArrayList<String> prompts, Question question) {
this.prompts = prompts;
this.question = question;
}
}
public Return formQuestion(Question question, String adapterID, String address, String ddrRecordId,
String sessionKey) {
ArrayList<String> prompts = new ArrayList<String>();
for (int count = 0; count <= LOOP_DETECTION; count++) {
if (question == null)
break;
log.info("Going to form question of type: " + question.getType());
if (question.getType() == null) {
question = null;
break;
}
String preferred_language = question.getPreferred_language();
question.setPreferred_language(preferred_language);
String qText = question.getQuestion_text();
if (qText != null && !qText.equals("")) {
prompts.add(qText);
}
if (question.getType().equalsIgnoreCase("closed")) {
for (Answer ans : question.getAnswers()) {
String answer = ans.getAnswer_text();
if (answer != null && !answer.equals("") && !answer.startsWith("dtmfKey://")) {
prompts.add(answer);
}
}
                break; //Jump from for loop
}
else if (question.getType().equalsIgnoreCase("comment")) {
//question = question.answer(null, adapterID, null, null);
break;
}
else if (question.getType().equalsIgnoreCase("referral")) {
if (question.getUrl() != null && !question.getUrl().startsWith("tel:")) {
question = Question.fromURL(question.getUrl(), adapterID, address, ddrRecordId, sessionKey);
//question = question.answer(null, null, null);
// break;
}
else {
// Break out because we are going to reconnect
break;
}
}
else {
                break; //Jump from for loop (open questions, etc.)
}
}
return new Return(prompts, question);
}
protected String renderComment(Question question, ArrayList<String> prompts, String sessionKey) {
TwiMLResponse twiml = new TwiMLResponse();
try {
addPrompts(prompts, question.getPreferred_language(), twiml);
Redirect redirect = new Redirect(getAnswerUrl());
redirect.setMethod("GET");
twiml.append(redirect);
}
catch (TwiMLException e) {
e.printStackTrace();
}
return twiml.toXML();
}
protected String renderReferral(Question question,ArrayList<String> prompts, String sessionKey, String remoteID){
TwiMLResponse twiml = new TwiMLResponse();
try {
addPrompts(prompts, question.getPreferred_language(), twiml);
String redirectTimeoutProperty = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.TIMEOUT );
String redirectTimeout = redirectTimeoutProperty != null ? redirectTimeoutProperty.replace("s", "") : "30";
int timeout = 30;
try {
timeout = Integer.parseInt(redirectTimeout);
}
catch (NumberFormatException e) {
e.printStackTrace();
}
com.twilio.sdk.verbs.Number number = new com.twilio.sdk.verbs.Number(question.getUrl());
String usePreconnect = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.USE_PRECONNECT );
usePreconnect = usePreconnect != null ? usePreconnect : "false";
boolean preconnect = Boolean.parseBoolean(usePreconnect);
if(preconnect) {
number.setMethod("GET");
number.setUrl(getPreconnectUrl());
}
Dial dial = new Dial();
dial.setCallerId(remoteID);
dial.append(number);
dial.setTimeout(timeout);
dial.setMethod("GET");
dial.setAction(getAnswerUrl());
twiml.append(dial);
}catch(TwiMLException e ) {
log.warning("Failed to create referal");
}
return twiml.toXML();
}
protected String renderClosedQuestion(Question question, ArrayList<String> prompts, String sessionKey) {
try {
sessionKey = URLEncoder.encode(sessionKey, "UTF-8");
}
catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
TwiMLResponse twiml = new TwiMLResponse();
Gather gather = new Gather();
gather.setAction(getAnswerUrl());
gather.setMethod("GET");
gather.setNumDigits(1);
String noAnswerTimeout = question.getMediaPropertyValue(MediumType.BROADSOFT, MediaPropertyKey.TIMEOUT);
boolean useHash = true;
if(question.getAnswers().size() > 11) {
useHash = false;
}
else {
List<Answer> answers = question.getAnswers();
for (Answer answer : answers) {
if (answer != null && answer.getAnswer_text() != null &&
answer.getAnswer_text().startsWith("dtmfKey://#")) {
useHash = true;
break;
}
}
}
//assign a default timeout if one is not specified
noAnswerTimeout = noAnswerTimeout != null ? noAnswerTimeout : "5";
if (noAnswerTimeout.endsWith("s")) {
log.warning("No answer timeout must end with 's'. E.g. 10s. Found: " + noAnswerTimeout);
noAnswerTimeout = noAnswerTimeout.replace("s", "");
}
int timeout = 5;
try {
timeout = Integer.parseInt(noAnswerTimeout);
}
catch (NumberFormatException e) {
e.printStackTrace();
}
gather.setTimeout(timeout);
if(useHash) {
gather.setFinishOnKey("");
}
try {
addPrompts(prompts, question.getPreferred_language(), gather);
twiml.append(gather);
Redirect redirect = new Redirect(getTimeoutUrl());
redirect.setMethod("GET");
twiml.append(redirect);
}
catch (TwiMLException e) {
e.printStackTrace();
}
return twiml.toXML();
}
protected String renderOpenQuestion(Question question,ArrayList<String> prompts,String sessionKey) {
TwiMLResponse twiml = new TwiMLResponse();
String typeProperty = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.TYPE );
if(typeProperty!=null && typeProperty.equalsIgnoreCase("audio")) {
renderVoiceMailQuestion(question, prompts, sessionKey, twiml);
} else {
Gather gather = new Gather();
gather.setAction(getAnswerUrl());
gather.setMethod("GET");
String dtmfMaxLength = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.ANSWER_INPUT_MAX_LENGTH );
if(dtmfMaxLength!=null) {
try {
int digits = Integer.parseInt(dtmfMaxLength);
gather.setNumDigits(digits);
}
catch (NumberFormatException e) {
e.printStackTrace();
}
}
String noAnswerTimeout = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.TIMEOUT );
//assign a default timeout if one is not specified
noAnswerTimeout = noAnswerTimeout != null ? noAnswerTimeout : "5";
if (noAnswerTimeout.endsWith("s")) {
log.warning("No answer timeout must end with 's'. E.g. 10s. Found: " + noAnswerTimeout);
noAnswerTimeout = noAnswerTimeout.replace("s", "");
}
int timeout = 5;
try {
timeout = Integer.parseInt(noAnswerTimeout);
}
catch (NumberFormatException e) {
e.printStackTrace();
}
gather.setTimeout(timeout);
try {
addPrompts(prompts, question.getPreferred_language(), gather);
twiml.append(gather);
Redirect redirect = new Redirect(getTimeoutUrl());
redirect.setMethod("GET");
twiml.append(redirect);
}
catch (TwiMLException e) {
e.printStackTrace();
}
}
return twiml.toXML();
}
/** renders/updates the xml for recording an audio and posts it to the user on the callback
* @param question
* @param prompts
* @param sessionKey
* @param outputter
* @throws IOException
* @throws UnsupportedEncodingException
*/
protected void renderVoiceMailQuestion(Question question, ArrayList<String> prompts, String sessionKey,
TwiMLResponse twiml) {
addPrompts(prompts, question.getPreferred_language(), twiml);
Record record = new Record();
record.setAction(getAnswerUrl());
record.setMethod("GET");
// Set max voicemail length
//assign a default voice mail length if one is not specified
String voiceMessageLengthProperty = question.getMediaPropertyValue(MediumType.BROADSOFT,
MediaPropertyKey.VOICE_MESSAGE_LENGTH);
voiceMessageLengthProperty = voiceMessageLengthProperty != null ? voiceMessageLengthProperty : "15";
int length = 15;
try {
length = Integer.parseInt(voiceMessageLengthProperty);
}
catch (NumberFormatException e) {
log.warning("Failed to parse timeout for voicemail e: "+e.getMessage());
}
record.setMaxLength(length);
// Set timeout
String timeoutProperty = question.getMediaPropertyValue(MediumType.BROADSOFT,
MediaPropertyKey.TIMEOUT);
timeoutProperty = timeoutProperty != null ? timeoutProperty : "5";
int timeout = 5;
try {
timeout = Integer.parseInt(timeoutProperty);
}
catch (NumberFormatException e) {
log.warning("Failed to parse timeout for voicemail e: "+e.getMessage());
}
record.setTimeout(timeout);
// Set voicemail beep
String voiceMailBeep = question.getMediaPropertyValue(MediumType.BROADSOFT, MediaPropertyKey.VOICE_MESSAGE_BEEP);
voiceMailBeep = voiceMailBeep != null ? voiceMailBeep : "true";
boolean beep = Boolean.parseBoolean(voiceMailBeep);
record.setPlayBeep(beep);
try {
twiml.append(record);
Redirect redirect = new Redirect(getTimeoutUrl());
redirect.setMethod("GET");
twiml.append(redirect);
} catch (TwiMLException e) {
log.warning("Failed to append record");
}
}
protected String renderExitQuestion(Question question, ArrayList<String> prompts, String sessionKey) {
TwiMLResponse twiml = new TwiMLResponse();
addPrompts(prompts, question.getPreferred_language(), twiml);
try {
twiml.append(new Hangup());
} catch (TwiMLException e) {
log.warning("Failed to append hangup");
}
return twiml.toXML();
}
protected void addPrompts(ArrayList<String> prompts, String language, Verb twiml) {
String lang = language.contains("-") ? language : "nl-NL";
try {
for (String prompt : prompts) {
if (prompt.startsWith("http")) {
twiml.append(new Play(prompt));
}
else {
Say say = new Say(prompt.replace("text://", ""));
say.setLanguage(lang);
twiml.append(say);
}
}
}
catch (TwiMLException e) {
log.warning("failed to added prompts: " + e.getMessage());
}
}
private Response handleQuestion(Question question, AdapterConfig adapterConfig, String remoteID, String sessionKey) {
String result = (new TwiMLResponse()).toXML();
Return res = formQuestion(question, adapterConfig.getConfigId(),remoteID, null, sessionKey);
if (question != null && !question.getType().equalsIgnoreCase("comment"))
question = res.question;
Session session = Session.getSession(sessionKey);
        // if the adapter is a trial adapter, add an introductory node
log.info("question formed at handleQuestion is: "+ ServerUtils.serializeWithoutException(question));
log.info("prompts formed at handleQuestion is: " + res.prompts);
if (question != null) {
question.generateIds();
session.setQuestion(question);
session.setRemoteAddress(remoteID);
session.storeSession();
if (question.getType().equalsIgnoreCase("closed")) {
result = renderClosedQuestion(question, res.prompts, sessionKey);
} else if (question.getType().equalsIgnoreCase("open")) {
result = renderOpenQuestion(question, res.prompts, sessionKey);
} else if (question.getType().equalsIgnoreCase("referral")) {
if (question.getUrl() != null
&& question.getUrl().startsWith("tel:")) {
// added for release0.4.2 to store the question in the
// session,
// for triggering an answered event
// Check with remoteID we are going to use for the call
String externalCallerId = question.getMediaPropertyValue( MediumType.BROADSOFT, MediaPropertyKey.USE_EXTERNAL_CALLERID );
Boolean callerId = false;
if(externalCallerId!=null) {
callerId = Boolean.parseBoolean(externalCallerId);
}
if(!callerId) {
remoteID = adapterConfig.getMyAddress();
}
log.info(String.format("current session key before referral is: %s and remoteId %s", sessionKey, remoteID));
String redirectedId = PhoneNumberUtils.formatNumber(question.getUrl().replace("tel:", ""), null);
if (redirectedId != null) {
// update url with formatted redirecteId. RFC3966
// returns format tel:<blabla> as expected
question.setUrl(redirectedId);
// store the remoteId as its lost while trying to
// trigger the answered event
HashMap<String, String> extras = new HashMap<String, String>();
extras.put("referredCalledId", redirectedId);
session.getExtras().putAll(extras);
session.setQuestion(question);
session.setRemoteAddress(remoteID);
// create a new ddr record and session to catch the
// redirect
Session referralSession = Session.getOrCreateSession(adapterConfig, remoteID, redirectedId);
if (session.getDirection() != null) {
DDRRecord ddrRecord = null;
try {
ddrRecord = DDRUtils.createDDRRecordOnOutgoingCommunication(adapterConfig, redirectedId, 1,question.getUrl());
if (ddrRecord != null) {
ddrRecord.addAdditionalInfo(Session.TRACKING_TOKEN_KEY,session.getTrackingToken());
ddrRecord.createOrUpdate();
referralSession.setDdrRecordId(ddrRecord.getId());
}
} catch (Exception e) {
e.printStackTrace();
log.severe(String.format("Continuing without DDR. Error: %s",e.toString()));
}
referralSession.setDirection(session.getDirection());
referralSession.setTrackingToken(session.getTrackingToken());
}
referralSession.setQuestion(session.getQuestion());
referralSession.getExtras().put("referralSessionKey", session.getKey());
referralSession.storeSession();
session.storeSession();
} else {
log.severe(String.format("Redirect address is invalid: %s. Ignoring.. ",question.getUrl().replace("tel:", "")));
}
result = renderReferral(question, res.prompts, sessionKey, remoteID);
}
} else if (question.getType().equalsIgnoreCase("exit")) {
result = renderExitQuestion(question, res.prompts, sessionKey);
} else if (res.prompts.size() > 0) {
result = renderComment(question, res.prompts, sessionKey);
}
} else if (res.prompts.size() > 0) {
result = renderComment(null, res.prompts, sessionKey);
} else {
log.info("Going to hangup? So clear Session?");
}
log.info("Sending xml: " + result);
return Response.status(Status.OK).type(MediaType.APPLICATION_XML).entity(result).build();
}
protected String getAnswerUrl() {
return "http://"+Settings.HOST+"/dialoghandler/rest/twilio/answer";
}
protected String getTimeoutUrl() {
return "http://"+Settings.HOST+"/dialoghandler/rest/twilio/timeout";
}
protected String getPreconnectUrl() {
return "http://"+Settings.HOST+"/dialoghandler/rest/twilio/preconnect";
}
}
| update: change preconnect event message
| dialoghandler/src/main/java/com/almende/dialog/adapter/TwilioAdapter.java | update: change preconnect event message | <ide><path>ialoghandler/src/main/java/com/almende/dialog/adapter/TwilioAdapter.java
<ide> @GET
<ide> @Produces("application/voicexml+xml")
<ide> public Response preconnect(@QueryParam("From") String localID, @QueryParam("To") String remoteID,
<del> @QueryParam("Direction") String direction) {
<del>
<del> // TODO: test
<del> String sessionKey = AdapterAgent.ADAPTER_TYPE_TWILIO+"|"+localID+"|"+ remoteID;
<del>
<add> @QueryParam("Direction") String direction) {
<add>
<add> String sessionKey = AdapterAgent.ADAPTER_TYPE_TWILIO + "|" + localID + "|" + remoteID;
<add>
<ide> String reply = (new TwiMLResponse()).toXML();
<ide> Session session = Session.getSession(sessionKey);
<ide> if (session != null && session.getQuestion() != null) {
<ide> HashMap<String, String> extras = new HashMap<String, String>();
<ide> extras.put("sessionKey", sessionKey);
<ide> extras.put("requester", session.getLocalAddress());
<del> question = question.event("preconnect", "Wrong answer received", extras, responder);
<add> question = question.event("preconnect", "preconnect event", extras, responder);
<ide> //reload the session
<ide> session = Session.getSession(sessionKey);
<ide> session.setQuestion(question); |
|
Java | mit | 1c6f3308fae7ab37f2daa9e792acfa5928e1a1fc | 0 | kazocsaba/matrix | package kcsaba.math.matrix.immutable;
import kcsaba.math.matrix.Matrix;
import kcsaba.math.matrix.Matrix2;
import kcsaba.math.matrix.Matrix3;
import kcsaba.math.matrix.Vector;
import kcsaba.math.matrix.Vector2;
import kcsaba.math.matrix.Vector3;
import kcsaba.math.matrix.Vector4;
/**
* Factory for creating immutable matrices.
 * @author Kazó Csaba
*/
public final class ImmutableMatrixFactory {
/**
* Creates a new immutable 2D column vector.
* @param x the x coordinate of the new vector
* @param y the y coordinate of the new vector
* @return the new vector
*/
public static ImmutableVector2 createVector(final double x, final double y) {
return new ImmutableVector2(new ImmutableData() {
@Override
public double get(int row, int col) {
if (col!=0) throw new IndexOutOfBoundsException();
switch (row) {
case 0: return x;
case 1: return y;
default: throw new IndexOutOfBoundsException();
}
}
@Override
public int getColumnCount() {
return 1;
}
@Override
public int getRowCount() {
return 2;
}
});
}
/**
* Creates a new immutable 3D column vector.
* @param x the x coordinate of the new vector
* @param y the y coordinate of the new vector
* @param z the z coordinate of the new vector
* @return the new vector
*/
public static ImmutableVector3 createVector(final double x, final double y, final double z) {
return new ImmutableVector3(new ImmutableData() {
@Override
public double get(int row, int col) {
if (col!=0) throw new IndexOutOfBoundsException();
switch (row) {
case 0: return x;
case 1: return y;
case 2: return z;
default: throw new IndexOutOfBoundsException();
}
}
@Override
public int getColumnCount() {
return 1;
}
@Override
public int getRowCount() {
return 3;
}
});
}
/**
* Creates a new immutable 4D column vector.
* @param x the x coordinate of the new vector
* @param y the y coordinate of the new vector
* @param z the z coordinate of the new vector
* @param h the h coordinate of the new vector
* @return the new vector
*/
public static ImmutableVector4 createVector(final double x, final double y, final double z, final double h) {
return new ImmutableVector4(new ImmutableData() {
@Override
public double get(int row, int col) {
if (col!=0) throw new IndexOutOfBoundsException();
switch (row) {
case 0: return x;
case 1: return y;
case 2: return z;
case 3: return h;
default: throw new IndexOutOfBoundsException();
}
}
@Override
public int getColumnCount() {
return 1;
}
@Override
public int getRowCount() {
return 4;
}
});
}
/**
* Returns an immutable matrix with the same values as the argument.
* @throws NullPointerException if {@code source} is {@code null}
*/
public static ImmutableMatrix copy(Matrix source) {
if (source instanceof ImmutableMatrix) return (ImmutableMatrix) source;
if (source.getColumnCount() == 1) return copy((Vector) source);
if (source.getRowCount() == 2 && source.getColumnCount() == 2) return new ImmutableMatrix2((Matrix2) source);
if (source.getRowCount() == 3 && source.getColumnCount() == 3) return new ImmutableMatrix3((Matrix3) source);
return new ImmutableMatrix(source);
}
/**
* Returns an immutable matrix with the same values as the argument.
* @throws NullPointerException if {@code source} is {@code null}
*/
public static ImmutableMatrix2 copy(Matrix2 source) {
return new ImmutableMatrix2(source);
}
/**
* Returns an immutable matrix with the same values as the argument.
* @throws NullPointerException if {@code source} is {@code null}
*/
public static ImmutableMatrix3 copy(Matrix3 source) {
return new ImmutableMatrix3(source);
}
/**
* Returns an immutable vector with the same values as the argument.
* @throws NullPointerException if {@code source} is {@code null}
*/
public static ImmutableVector copy(Vector source) {
if (source instanceof ImmutableVector) return (ImmutableVector) source;
switch (source.getRowCount()) {
case 2: return copy((Vector2)source);
case 3: return copy((Vector3)source);
case 4: return copy((Vector4)source);
default: return new ImmutableVector(source);
}
}
/**
* Returns an immutable vector with the same values as the argument.
* @throws NullPointerException if {@code source} is {@code null}
*/
public static ImmutableVector2 copy(Vector2 source) {
if (source instanceof ImmutableVector2) return (ImmutableVector2) source;
return createVector(source.getX(), source.getY());
}
/**
* Returns an immutable vector with the same values as the argument.
* @throws NullPointerException if {@code source} is {@code null}
*/
public static ImmutableVector3 copy(Vector3 source) {
if (source instanceof ImmutableVector3) return (ImmutableVector3) source;
return createVector(source.getX(), source.getY(), source.getZ());
}
/**
* Returns an immutable vector with the same values as the argument.
* @throws NullPointerException if {@code source} is {@code null}
*/
public static ImmutableVector4 copy(Vector4 source) {
if (source instanceof ImmutableVector4) return (ImmutableVector4) source;
return createVector(source.getX(), source.getY(), source.getZ(), source.getH());
}
private static ImmutableMatrix create(ImmutableData data) {
if (data.getColumnCount() == 1) {
switch (data.getRowCount()) {
case 2: return new ImmutableVector2(data);
case 3: return new ImmutableVector3(data);
case 4: return new ImmutableVector4(data);
default: return new ImmutableVector(data);
}
}
if (data.getRowCount() == 2 && data.getColumnCount() == 2) return new ImmutableMatrix2(data);
if (data.getRowCount() == 3 && data.getColumnCount() == 3) return new ImmutableMatrix3(data);
return new ImmutableMatrix(data);
}
/**
* Returns an immutable matrix of the specified size whose elements are all 0.
* @param rows the number of rows
* @param cols the number of columns
	 * @return a <code>rows</code>×<code>cols</code> matrix whose elements are all 0
* @throws IllegalArgumentException if either argument is non-positive
*/
public static ImmutableMatrix zeros(final int rows, final int cols) {
if (rows<=0 || cols<=0) throw new IllegalArgumentException("Invalid dimensions");
return create(new ImmutableData() {
@Override
public double get(int row, int col) {
if (row<0 || row>=rows || col<0 || col>=cols) throw new IndexOutOfBoundsException();
return 0;
}
@Override
public int getColumnCount() {
				return cols;
}
@Override
public int getRowCount() {
				return rows;
}
});
}
/**
* Returns an immutable matrix of the specified size whose elements are all 1.
* @param rows the number of rows
* @param cols the number of columns
	 * @return a <code>rows</code>×<code>cols</code> matrix whose elements are all 1
* @throws IllegalArgumentException if either argument is non-positive
*/
public static ImmutableMatrix ones(final int rows, final int cols) {
if (rows<=0 || cols<=0) throw new IllegalArgumentException("Invalid dimensions");
return create(new ImmutableData() {
@Override
public double get(int row, int col) {
if (row<0 || row>=rows || col<0 || col>=cols) throw new IndexOutOfBoundsException();
return 1;
}
@Override
public int getColumnCount() {
				return cols;
}
@Override
public int getRowCount() {
				return rows;
}
});
}
/**
* Returns an immutable identity matrix of the specified dimension.
* @param size the dimension of the matrix
	 * @return a <code>size</code>×<code>size</code> identity matrix
* @throws IllegalArgumentException if the size is not positive
*/
public static ImmutableMatrix identity(final int size) {
if (size<=0) throw new IllegalArgumentException("Invalid size");
return create(new ImmutableData() {
@Override
public double get(int row, int col) {
if (row<0 || row>=size || col<0 || col>=size) throw new IndexOutOfBoundsException();
return row==col ? 1 : 0;
}
@Override
public int getColumnCount() {
return size;
}
@Override
public int getRowCount() {
return size;
}
});
}
}
| src/main/java/kcsaba/math/matrix/immutable/ImmutableMatrixFactory.java | package kcsaba.math.matrix.immutable;
import kcsaba.math.matrix.Matrix;
import kcsaba.math.matrix.Matrix2;
import kcsaba.math.matrix.Matrix3;
import kcsaba.math.matrix.Vector;
import kcsaba.math.matrix.Vector2;
import kcsaba.math.matrix.Vector3;
import kcsaba.math.matrix.Vector4;
/**
* Factory for creating immutable matrices.
 * @author Kazó Csaba
*/
public final class ImmutableMatrixFactory {
/**
* Returns an immutable matrix with the same values as the argument.
* @throws NullPointerException if {@code source} is {@code null}
*/
public static ImmutableMatrix copy(Matrix source) {
if (source instanceof ImmutableMatrix) return (ImmutableMatrix) source;
if (source.getColumnCount() == 1) return copy((Vector) source);
if (source.getRowCount() == 2 && source.getColumnCount() == 2) return new ImmutableMatrix2((Matrix2) source);
if (source.getRowCount() == 3 && source.getColumnCount() == 3) return new ImmutableMatrix3((Matrix3) source);
return new ImmutableMatrix(source);
}
/**
* Returns an immutable matrix with the same values as the argument.
* @throws NullPointerException if {@code source} is {@code null}
*/
public static ImmutableMatrix2 copy(Matrix2 source) {
return new ImmutableMatrix2(source);
}
/**
* Returns an immutable matrix with the same values as the argument.
* @throws NullPointerException if {@code source} is {@code null}
*/
public static ImmutableMatrix3 copy(Matrix3 source) {
return new ImmutableMatrix3(source);
}
/**
* Returns an immutable vector with the same values as the argument.
* @throws NullPointerException if {@code source} is {@code null}
*/
public static ImmutableVector copy(Vector source) {
if (source instanceof ImmutableVector) return (ImmutableVector) source;
switch (source.getRowCount()) {
case 2: return new ImmutableVector2((Vector2)source);
case 3: return new ImmutableVector3((Vector3)source);
case 4: return new ImmutableVector4((Vector4)source);
default: return new ImmutableVector(source);
}
}
/**
* Returns an immutable vector with the same values as the argument.
* @throws NullPointerException if {@code source} is {@code null}
*/
public static ImmutableVector2 copy(Vector2 source) {
if (source instanceof ImmutableVector2) return (ImmutableVector2) source;
return new ImmutableVector2(source);
}
/**
* Returns an immutable vector with the same values as the argument.
* @throws NullPointerException if {@code source} is {@code null}
*/
public static ImmutableVector3 copy(Vector3 source) {
if (source instanceof ImmutableVector3) return (ImmutableVector3) source;
return new ImmutableVector3(source);
}
/**
* Returns an immutable vector with the same values as the argument.
* @throws NullPointerException if {@code source} is {@code null}
*/
public static ImmutableVector4 copy(Vector4 source) {
if (source instanceof ImmutableVector4) return (ImmutableVector4) source;
return new ImmutableVector4(source);
}
private static ImmutableMatrix create(ImmutableData data) {
if (data.getColumnCount() == 1) {
switch (data.getRowCount()) {
case 2: return new ImmutableVector2(data);
case 3: return new ImmutableVector3(data);
case 4: return new ImmutableVector4(data);
default: return new ImmutableVector(data);
}
}
if (data.getRowCount() == 2 && data.getColumnCount() == 2) return new ImmutableMatrix2(data);
if (data.getRowCount() == 3 && data.getColumnCount() == 3) return new ImmutableMatrix3(data);
return new ImmutableMatrix(data);
}
/**
* Returns an immutable matrix of the specified size whose elements are all 0.
* @param rows the number of rows
* @param cols the number of columns
	 * @return a <code>rows</code>×<code>cols</code> matrix whose elements are all 0
* @throws IllegalArgumentException if either argument is non-positive
*/
public static ImmutableMatrix zeros(final int rows, final int cols) {
if (rows<=0 || cols<=0) throw new IllegalArgumentException("Invalid dimensions");
return create(new ImmutableData() {
@Override
public double get(int row, int col) {
if (row<0 || row>=rows || col<0 || col>=cols) throw new IndexOutOfBoundsException();
return 0;
}
@Override
public int getColumnCount() {
				return cols;
}
@Override
public int getRowCount() {
				return rows;
}
});
}
/**
* Returns an immutable matrix of the specified size whose elements are all 1.
* @param rows the number of rows
* @param cols the number of columns
	 * @return a <code>rows</code>×<code>cols</code> matrix whose elements are all 1
* @throws IllegalArgumentException if either argument is non-positive
*/
public static ImmutableMatrix ones(final int rows, final int cols) {
if (rows<=0 || cols<=0) throw new IllegalArgumentException("Invalid dimensions");
return create(new ImmutableData() {
@Override
public double get(int row, int col) {
if (row<0 || row>=rows || col<0 || col>=cols) throw new IndexOutOfBoundsException();
return 1;
}
@Override
public int getColumnCount() {
				return cols;
}
@Override
public int getRowCount() {
				return rows;
}
});
}
/**
* Returns an immutable identity matrix of the specified dimension.
* @param size the dimension of the matrix
	 * @return a <code>size</code>×<code>size</code> identity matrix
* @throws IllegalArgumentException if the size is not positive
*/
public static ImmutableMatrix identity(final int size) {
if (size<=0) throw new IllegalArgumentException("Invalid size");
return create(new ImmutableData() {
@Override
public double get(int row, int col) {
if (row<0 || row>=size || col<0 || col>=size) throw new IndexOutOfBoundsException();
return row==col ? 1 : 0;
}
@Override
public int getColumnCount() {
return size;
}
@Override
public int getRowCount() {
return size;
}
});
}
}
| Add createVector variants to ImmutableMatrixFactory.
| src/main/java/kcsaba/math/matrix/immutable/ImmutableMatrixFactory.java | Add createVector variants to ImmutableMatrixFactory. | <ide><path>rc/main/java/kcsaba/math/matrix/immutable/ImmutableMatrixFactory.java
<ide> public final class ImmutableMatrixFactory {
<ide>
<ide> /**
<add> * Creates a new immutable 2D column vector.
<add> * @param x the x coordinate of the new vector
<add> * @param y the y coordinate of the new vector
<add> * @return the new vector
<add> */
<add> public static ImmutableVector2 createVector(final double x, final double y) {
<add> return new ImmutableVector2(new ImmutableData() {
<add>
<add> @Override
<add> public double get(int row, int col) {
<add> if (col!=0) throw new IndexOutOfBoundsException();
<add> switch (row) {
<add> case 0: return x;
<add> case 1: return y;
<add> default: throw new IndexOutOfBoundsException();
<add> }
<add> }
<add>
<add> @Override
<add> public int getColumnCount() {
<add> return 1;
<add> }
<add>
<add> @Override
<add> public int getRowCount() {
<add> return 2;
<add> }
<add> });
<add> }
<add>
<add> /**
<add> * Creates a new immutable 3D column vector.
<add> * @param x the x coordinate of the new vector
<add> * @param y the y coordinate of the new vector
<add> * @param z the z coordinate of the new vector
<add> * @return the new vector
<add> */
<add> public static ImmutableVector3 createVector(final double x, final double y, final double z) {
<add> return new ImmutableVector3(new ImmutableData() {
<add>
<add> @Override
<add> public double get(int row, int col) {
<add> if (col!=0) throw new IndexOutOfBoundsException();
<add> switch (row) {
<add> case 0: return x;
<add> case 1: return y;
<add> case 2: return z;
<add> default: throw new IndexOutOfBoundsException();
<add> }
<add> }
<add>
<add> @Override
<add> public int getColumnCount() {
<add> return 1;
<add> }
<add>
<add> @Override
<add> public int getRowCount() {
<add> return 3;
<add> }
<add> });
<add> }
<add>
<add> /**
<add> * Creates a new immutable 4D column vector.
<add> * @param x the x coordinate of the new vector
<add> * @param y the y coordinate of the new vector
<add> * @param z the z coordinate of the new vector
<add> * @param h the h coordinate of the new vector
<add> * @return the new vector
<add> */
<add> public static ImmutableVector4 createVector(final double x, final double y, final double z, final double h) {
<add> return new ImmutableVector4(new ImmutableData() {
<add>
<add> @Override
<add> public double get(int row, int col) {
<add> if (col!=0) throw new IndexOutOfBoundsException();
<add> switch (row) {
<add> case 0: return x;
<add> case 1: return y;
<add> case 2: return z;
<add> case 3: return h;
<add> default: throw new IndexOutOfBoundsException();
<add> }
<add> }
<add>
<add> @Override
<add> public int getColumnCount() {
<add> return 1;
<add> }
<add>
<add> @Override
<add> public int getRowCount() {
<add> return 4;
<add> }
<add> });
<add> }
<add>
<add> /**
<ide> * Returns an immutable matrix with the same values as the argument.
<ide> * @throws NullPointerException if {@code source} is {@code null}
<ide> */
<ide> public static ImmutableVector copy(Vector source) {
<ide> if (source instanceof ImmutableVector) return (ImmutableVector) source;
<ide> switch (source.getRowCount()) {
<del> case 2: return new ImmutableVector2((Vector2)source);
<del> case 3: return new ImmutableVector3((Vector3)source);
<del> case 4: return new ImmutableVector4((Vector4)source);
<add> case 2: return copy((Vector2)source);
<add> case 3: return copy((Vector3)source);
<add> case 4: return copy((Vector4)source);
<ide> default: return new ImmutableVector(source);
<ide> }
<ide> }
<ide> */
<ide> public static ImmutableVector2 copy(Vector2 source) {
<ide> if (source instanceof ImmutableVector2) return (ImmutableVector2) source;
<del> return new ImmutableVector2(source);
<add> return createVector(source.getX(), source.getY());
<ide> }
<ide>
<ide> /**
<ide> */
<ide> public static ImmutableVector3 copy(Vector3 source) {
<ide> if (source instanceof ImmutableVector3) return (ImmutableVector3) source;
<del> return new ImmutableVector3(source);
<add> return createVector(source.getX(), source.getY(), source.getZ());
<ide> }
<ide>
<ide> /**
<ide> */
<ide> public static ImmutableVector4 copy(Vector4 source) {
<ide> if (source instanceof ImmutableVector4) return (ImmutableVector4) source;
<del> return new ImmutableVector4(source);
<add> return createVector(source.getX(), source.getY(), source.getZ(), source.getH());
<ide> }
<ide>
<ide> private static ImmutableMatrix create(ImmutableData data) { |
|
Java | agpl-3.0 | 3804e73dc9a51bd361096eba72e650688a5db4d6 | 0 | duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test | 0f0e7420-2e61-11e5-9284-b827eb9e62be | hello.java | 0f08d43e-2e61-11e5-9284-b827eb9e62be | 0f0e7420-2e61-11e5-9284-b827eb9e62be | hello.java | 0f0e7420-2e61-11e5-9284-b827eb9e62be | <ide><path>ello.java
<del>0f08d43e-2e61-11e5-9284-b827eb9e62be
<add>0f0e7420-2e61-11e5-9284-b827eb9e62be |
|
Java | apache-2.0 | 45cb55d67006eca1ce615fe1ce601da3b66e990b | 0 | sanderginn/isis,incodehq/isis,kidaa/isis,peridotperiod/isis,sanderginn/isis,estatio/isis,sanderginn/isis,oscarbou/isis,kidaa/isis,apache/isis,apache/isis,sanderginn/isis,howepeng/isis,peridotperiod/isis,peridotperiod/isis,oscarbou/isis,oscarbou/isis,howepeng/isis,incodehq/isis,estatio/isis,peridotperiod/isis,kidaa/isis,niv0/isis,apache/isis,incodehq/isis,niv0/isis,niv0/isis,howepeng/isis,apache/isis,incodehq/isis,apache/isis,howepeng/isis,apache/isis,estatio/isis,oscarbou/isis,estatio/isis,kidaa/isis,niv0/isis | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.viewer.wicket.ui.components.scalars.primitive;
import de.agilecoders.wicket.extensions.markup.html.bootstrap.form.checkboxx.CheckBoxX;
import de.agilecoders.wicket.extensions.markup.html.bootstrap.form.checkboxx.CheckBoxXConfig;
import org.apache.wicket.AttributeModifier;
import org.apache.wicket.Component;
import org.apache.wicket.behavior.Behavior;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.FormComponentLabel;
import org.apache.wicket.model.Model;
import org.apache.isis.core.metamodel.adapter.ObjectAdapter;
import org.apache.isis.core.metamodel.spec.ObjectSpecification;
import org.apache.isis.viewer.wicket.model.models.ScalarModel;
import org.apache.isis.viewer.wicket.ui.components.scalars.ScalarPanelAbstract;
import org.apache.isis.viewer.wicket.ui.util.CssClassAppender;
/**
* Panel for rendering scalars of type {@link Boolean} or <tt>boolean</tt>.
*/
public class BooleanPanel extends ScalarPanelAbstract {
private static final CheckBoxXConfig THREE_STATE_CONFIG = new CheckBoxXConfig()
.withSize(CheckBoxXConfig.Sizes.xs)
.withEnclosedLabel(true)
.withIconChecked("<i class='fa fa-check'></i>")
.withIconNull("<i class='fa fa-square'></i>");
private static final CheckBoxXConfig TWO_STATE_CONFIG = new CheckBoxXConfig(THREE_STATE_CONFIG).withThreeState(false);
private static final long serialVersionUID = 1L;
private static final String ID_SCALAR_IF_REGULAR = "scalarIfRegular";
private static final String ID_SCALAR_NAME = "scalarName";
private static final String ID_SCALAR_VALUE = "scalarValue";
private static final String ID_SCALAR_IF_COMPACT = "scalarIfCompact";
private CheckBoxX checkBox;
public BooleanPanel(final String id, final ScalarModel scalarModel) {
super(id, scalarModel);
}
@Override
protected FormComponentLabel addComponentForRegular() {
final String name = getModel().getName();
checkBox = createCheckBox(ID_SCALAR_VALUE);
checkBox.setLabel(Model.of(name));
final FormComponentLabel labelIfRegular = new FormComponentLabel(ID_SCALAR_IF_REGULAR, checkBox);
labelIfRegular.add(checkBox);
if(getModel().isRequired()) {
labelIfRegular.add(new CssClassAppender("mandatory"));
}
final String describedAs = getModel().getDescribedAs();
if(describedAs != null) {
labelIfRegular.add(new AttributeModifier("title", Model.of(describedAs)));
}
final Label scalarName = new Label(ID_SCALAR_NAME, getRendering().getLabelCaption(checkBox));
addOrReplace(scalarName);
addOrReplace(labelIfRegular);
addFeedbackTo(labelIfRegular, checkBox);
addAdditionalLinksTo(labelIfRegular);
return labelIfRegular;
}
/**
* Mandatory hook method to build the component to render the model when in
* {@link Rendering#COMPACT compact} format.
*/
@Override
protected Component addComponentForCompact() {
final CheckBoxX component = createCheckBox(ID_SCALAR_IF_COMPACT);
addOrReplace(component);
return component;
}
private CheckBoxX createCheckBox(final String id) {
final CheckBoxX checkBox = new CheckBoxX(id, new Model<Boolean>() {
private static final long serialVersionUID = 1L;
@Override
public Boolean getObject() {
final ScalarModel model = getModel();
final ObjectAdapter adapter = model.getObject();
return adapter != null? (Boolean) adapter.getObject(): null;
}
@Override
public void setObject(final Boolean object) {
final ObjectAdapter adapter = getAdapterManager().adapterFor(object);
getModel().setObject(adapter);
}
}) {
@Override
public CheckBoxXConfig getConfig() {
return BooleanPanel.this.getModel().isRequired()
? TWO_STATE_CONFIG
: THREE_STATE_CONFIG;
}
};
checkBox.setOutputMarkupId(true);
checkBox.setEnabled(false); // will be enabled before rendering if
// required
// must prime the underlying model if this is a primitive boolean
final ObjectSpecification objectSpecification = getModel().getTypeOfSpecification();
if(objectSpecification.getFullIdentifier().equals("boolean")) {
if(getModel().getObject() == null) {
getModel().setObject(getAdapterManager().adapterFor(false));
}
}
return checkBox;
}
@Override
protected void onBeforeRenderWhenEnabled() {
super.onBeforeRenderWhenEnabled();
checkBox.setEnabled(true);
}
@Override
protected void onBeforeRenderWhenViewMode() {
super.onBeforeRenderWhenViewMode();
checkBox.setEnabled(false);
}
@Override
protected void onBeforeRenderWhenDisabled(final String disableReason) {
super.onBeforeRenderWhenDisabled(disableReason);
checkBox.setEnabled(false);
}
@Override
protected void addFormComponentBehavior(Behavior behavior) {
checkBox.add(behavior);
}
}
| component/viewer/wicket/ui/src/main/java/org/apache/isis/viewer/wicket/ui/components/scalars/primitive/BooleanPanel.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.viewer.wicket.ui.components.scalars.primitive;
import de.agilecoders.wicket.extensions.markup.html.bootstrap.form.checkboxx.CheckBoxX;
import de.agilecoders.wicket.extensions.markup.html.bootstrap.form.checkboxx.CheckBoxXConfig;
import org.apache.wicket.AttributeModifier;
import org.apache.wicket.Component;
import org.apache.wicket.behavior.Behavior;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.FormComponentLabel;
import org.apache.wicket.model.Model;
import org.apache.isis.core.metamodel.adapter.ObjectAdapter;
import org.apache.isis.core.metamodel.spec.ObjectSpecification;
import org.apache.isis.viewer.wicket.model.models.ScalarModel;
import org.apache.isis.viewer.wicket.ui.components.scalars.ScalarPanelAbstract;
import org.apache.isis.viewer.wicket.ui.util.CssClassAppender;
/**
* Panel for rendering scalars of type {@link Boolean} or <tt>boolean</tt>.
*/
public class BooleanPanel extends ScalarPanelAbstract {
private static final CheckBoxXConfig THREE_STATE_CONFIG = new CheckBoxXConfig()
.withSize(CheckBoxXConfig.Sizes.xs)
.withLabelClickEvent(false)
.withIconChecked("<i class='fa fa-check'></i>")
.withIconNull("<i class='fa fa-square'></i>");
private static final CheckBoxXConfig TWO_STATE_CONFIG = new CheckBoxXConfig(THREE_STATE_CONFIG).withThreeState(false);
private static final long serialVersionUID = 1L;
private static final String ID_SCALAR_IF_REGULAR = "scalarIfRegular";
private static final String ID_SCALAR_NAME = "scalarName";
private static final String ID_SCALAR_VALUE = "scalarValue";
private static final String ID_SCALAR_IF_COMPACT = "scalarIfCompact";
private CheckBoxX checkBox;
public BooleanPanel(final String id, final ScalarModel scalarModel) {
super(id, scalarModel);
}
@Override
protected FormComponentLabel addComponentForRegular() {
final String name = getModel().getName();
checkBox = createCheckBox(ID_SCALAR_VALUE);
checkBox.setLabel(Model.of(name));
final FormComponentLabel labelIfRegular = new FormComponentLabel(ID_SCALAR_IF_REGULAR, checkBox);
labelIfRegular.add(checkBox);
if(getModel().isRequired()) {
labelIfRegular.add(new CssClassAppender("mandatory"));
}
final String describedAs = getModel().getDescribedAs();
if(describedAs != null) {
labelIfRegular.add(new AttributeModifier("title", Model.of(describedAs)));
}
final Label scalarName = new Label(ID_SCALAR_NAME, getRendering().getLabelCaption(checkBox));
addOrReplace(scalarName);
addOrReplace(labelIfRegular);
addFeedbackTo(labelIfRegular, checkBox);
addAdditionalLinksTo(labelIfRegular);
return labelIfRegular;
}
/**
* Mandatory hook method to build the component to render the model when in
* {@link Rendering#COMPACT compact} format.
*/
@Override
protected Component addComponentForCompact() {
final CheckBoxX component = createCheckBox(ID_SCALAR_IF_COMPACT);
addOrReplace(component);
return component;
}
private CheckBoxX createCheckBox(final String id) {
final CheckBoxX checkBox = new CheckBoxX(id, new Model<Boolean>() {
private static final long serialVersionUID = 1L;
@Override
public Boolean getObject() {
final ScalarModel model = getModel();
final ObjectAdapter adapter = model.getObject();
return adapter != null? (Boolean) adapter.getObject(): null;
}
@Override
public void setObject(final Boolean object) {
final ObjectAdapter adapter = getAdapterManager().adapterFor(object);
getModel().setObject(adapter);
}
}) {
@Override
public CheckBoxXConfig getConfig() {
return BooleanPanel.this.getModel().isRequired()
? TWO_STATE_CONFIG
: THREE_STATE_CONFIG;
}
};
checkBox.setOutputMarkupId(true);
checkBox.setEnabled(false); // will be enabled before rendering if
// required
// must prime the underlying model if this is a primitive boolean
final ObjectSpecification objectSpecification = getModel().getTypeOfSpecification();
if(objectSpecification.getFullIdentifier().equals("boolean")) {
if(getModel().getObject() == null) {
getModel().setObject(getAdapterManager().adapterFor(false));
}
}
return checkBox;
}
@Override
protected void onBeforeRenderWhenEnabled() {
super.onBeforeRenderWhenEnabled();
checkBox.setEnabled(true);
}
@Override
protected void onBeforeRenderWhenViewMode() {
super.onBeforeRenderWhenViewMode();
checkBox.setEnabled(false);
}
@Override
protected void onBeforeRenderWhenDisabled(final String disableReason) {
super.onBeforeRenderWhenDisabled(disableReason);
checkBox.setEnabled(false);
}
@Override
protected void addFormComponentBehavior(Behavior behavior) {
checkBox.add(behavior);
}
}
| ISIS-537 Use the new version of CheckBoxX (Wicket-Bootstrap). Now optional Boolean properties have three states
| component/viewer/wicket/ui/src/main/java/org/apache/isis/viewer/wicket/ui/components/scalars/primitive/BooleanPanel.java | ISIS-537 Use the new version of CheckBoxX (Wicket-Bootstrap). Now optional Boolean properties have three states
<ide>
<ide> private static final CheckBoxXConfig THREE_STATE_CONFIG = new CheckBoxXConfig()
<ide> .withSize(CheckBoxXConfig.Sizes.xs)
<del> .withLabelClickEvent(false)
<add> .withEnclosedLabel(true)
<ide> .withIconChecked("<i class='fa fa-check'></i>")
<ide> .withIconNull("<i class='fa fa-square'></i>");
<ide> |
|
Java | apache-2.0 | 6c4d49c2251b9ff8e57f917857b4cbed937c6bf2 | 0 | ibissource/iaf,ibissource/iaf,ibissource/iaf,ibissource/iaf,ibissource/iaf | /*
* $Log: FixedForwardPipe.java,v $
* Revision 1.5 2004-10-19 13:52:31 L190409
* super.configure in configure()
*
* Revision 1.4 2004/03/26 10:42:34 Johan Verrips <[email protected]>
* added @version tag in javadoc
*
* Revision 1.3 2004/03/24 14:04:26 Gerrit van Brakel <[email protected]>
* getLogPrefix in thrown exceptions
*
*/
package nl.nn.adapterframework.pipes;
import nl.nn.adapterframework.configuration.ConfigurationException;
import nl.nn.adapterframework.core.PipeForward;
/**
* Provides a base class for a Pipe that always has the same forward.
* Ancestor classes should call <code>super.configure()</code> in their <code>configure()</code>-methods.
*
* <p><b>Configuration:</b>
* <table border="1">
* <tr><th>attributes</th><th>description</th><th>default</th></tr>
* <tr><td>{@link #setName(String) name}</td><td>name of the Pipe</td><td> </td></tr>
* <tr><td>{@link #setMaxThreads(int) maxThreads}</td><td>maximum number of threads that may call {@link #doPipe(Object, nl.nn.adapterframework.core.PipeLineSession)} simultaneously</td><td>0 (unlimited)</td></tr>
* <tr><td>{@link #setForwardName(String) forwardName}</td> <td>name of forward returned upon completion</td><td>"success"</td></tr>
* </table>
* </p>
* <p><b>Exits:</b>
* <table border="1">
* <tr><th>state</th><th>condition</th></tr>
* <tr><td>"success"</td><td>default</td></tr>
* <tr><td><i>{@link #setForwardName(String) forwardName}</i></td><td>if specified</td></tr>
* </table>
* </p>
* @version Id
* @author Gerrit van Brakel
*/
public class FixedForwardPipe extends AbstractPipe {
public static final String version="$Id: FixedForwardPipe.java,v 1.5 2004-10-19 13:52:31 L190409 Exp $";
private String forwardName = "success";
private PipeForward forward;
/**
* checks for correct configuration of forward
*/
public void configure() throws ConfigurationException {
super.configure();
forward = findForward(forwardName);
if (forward == null)
throw new ConfigurationException(getLogPrefix(null) + "has no forward with name [" + forwardName + "]");
}
protected PipeForward getForward() {
return forward;
}
/**
* Sets the name of the <code>forward</code> that is looked up
* upon completion.
*/
public void setForwardName(String forwardName) {
this.forwardName = forwardName;
}
public String getForwardName() {
return forwardName;
}
}
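/*
 * Illustrative sketch, not taken from the framework sources: subclasses
 * are expected to call super.configure() so the forward is resolved up
 * front. The class name "EchoPipe" is invented for this example:
 *
 *     public class EchoPipe extends FixedForwardPipe {
 *         public void configure() throws ConfigurationException {
 *             super.configure(); // fails fast if no forward matches forwardName ("success" by default)
 *         }
 *     }
 */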
| JavaSource/nl/nn/adapterframework/pipes/FixedForwardPipe.java | /*
* $Log: FixedForwardPipe.java,v $
* Revision 1.4 2004-03-26 10:42:34 NNVZNL01#L180564
* added @version tag in javadoc
*
* Revision 1.3 2004/03/24 14:04:26 Gerrit van Brakel <[email protected]>
* getLogPrefix in thrown exceptions
*
*/
package nl.nn.adapterframework.pipes;
import nl.nn.adapterframework.configuration.ConfigurationException;
import nl.nn.adapterframework.core.PipeForward;
/**
* Provides a base class for a Pipe that always has the same forward.
* Ancestor classes should call <code>super.configure()</code> in their <code>configure()</code>-methods.
*
* <p><b>Configuration:</b>
* <table border="1">
* <tr><th>attributes</th><th>description</th><th>default</th></tr>
* <tr><td>{@link #setName(String) name}</td><td>name of the Pipe</td><td> </td></tr>
* <tr><td>{@link #setMaxThreads(int) maxThreads}</td><td>maximum number of threads that may call {@link #doPipe(Object, nl.nn.adapterframework.core.PipeLineSession)} simultaneously</td><td>0 (unlimited)</td></tr>
* <tr><td>{@link #setForwardName(String) forwardName}</td> <td>name of forward returned upon completion</td><td>"success"</td></tr>
* </table>
* </p>
* <p><b>Exits:</b>
* <table border="1">
* <tr><th>state</th><th>condition</th></tr>
* <tr><td>"success"</td><td>default</td></tr>
* <tr><td><i>{@link #setForwardName(String) forwardName}</i></td><td>if specified</td></tr>
* </table>
* </p>
* @version Id
* @author Gerrit van Brakel
*/
public class FixedForwardPipe extends AbstractPipe {
public static final String version="$Id: FixedForwardPipe.java,v 1.4 2004-03-26 10:42:34 NNVZNL01#L180564 Exp $";
private String forwardName = "success";
private PipeForward forward;
/**
* checks for correct configuration of forward
*/
public void configure() throws ConfigurationException {
forward = findForward(forwardName);
if (forward == null)
throw new ConfigurationException(getLogPrefix(null) + "has no forward with name [" + forwardName + "]");
}
protected PipeForward getForward() {
return forward;
}
/**
* Sets the name of the <code>forward</code> that is looked up
* upon completion.
*/
public void setForwardName(String forwardName) {
this.forwardName = forwardName;
}
public String getForwardName() {
return forwardName;
}
}
| super.configure in configure()
| JavaSource/nl/nn/adapterframework/pipes/FixedForwardPipe.java | super.configure in configure() | <ide><path>avaSource/nl/nn/adapterframework/pipes/FixedForwardPipe.java
<ide> /*
<ide> * $Log: FixedForwardPipe.java,v $
<del> * Revision 1.4 2004-03-26 10:42:34 NNVZNL01#L180564
<add> * Revision 1.5 2004-10-19 13:52:31 L190409
<add> * super.configure in configure()
<add> *
<add> * Revision 1.4 2004/03/26 10:42:34 Johan Verrips <[email protected]>
<ide> * added @version tag in javadoc
<ide> *
<ide> * Revision 1.3 2004/03/24 14:04:26 Gerrit van Brakel <[email protected]>
<ide> * @author Gerrit van Brakel
<ide> */
<ide> public class FixedForwardPipe extends AbstractPipe {
<del> public static final String version="$Id: FixedForwardPipe.java,v 1.4 2004-03-26 10:42:34 NNVZNL01#L180564 Exp $";
<add> public static final String version="$Id: FixedForwardPipe.java,v 1.5 2004-10-19 13:52:31 L190409 Exp $";
<ide>
<ide> private String forwardName = "success";
<ide> private PipeForward forward;
<ide> * checks for correct configuration of forward
<ide> */
<ide> public void configure() throws ConfigurationException {
<add> super.configure();
<ide> forward = findForward(forwardName);
<ide> if (forward == null)
<ide> throw new ConfigurationException(getLogPrefix(null) + "has no forward with name [" + forwardName + "]"); |
|
Java | apache-2.0 | 74ff9f45a1ee94e8b69ca0bda4642d9e169c8972 | 0 | thomsonreuters/jmeter,hemikak/jmeter,kyroskoh/jmeter,ubikfsabbe/jmeter,vherilier/jmeter,ubikloadpack/jmeter,liwangbest/jmeter,max3163/jmeter,etnetera/jmeter,kschroeder/jmeter,d0k1/jmeter,ubikloadpack/jmeter,etnetera/jmeter,d0k1/jmeter,fj11/jmeter,hemikak/jmeter,fj11/jmeter,etnetera/jmeter,irfanah/jmeter,kyroskoh/jmeter,kschroeder/jmeter,ra0077/jmeter,kyroskoh/jmeter,irfanah/jmeter,vherilier/jmeter,max3163/jmeter,ra0077/jmeter,ubikloadpack/jmeter,d0k1/jmeter,d0k1/jmeter,DoctorQ/jmeter,fj11/jmeter,etnetera/jmeter,tuanhq/jmeter,ra0077/jmeter,vherilier/jmeter,tuanhq/jmeter,ubikfsabbe/jmeter,liwangbest/jmeter,ubikfsabbe/jmeter,irfanah/jmeter,hemikak/jmeter,hizhangqi/jmeter-1,thomsonreuters/jmeter,etnetera/jmeter,ThiagoGarciaAlves/jmeter,thomsonreuters/jmeter,ra0077/jmeter,kschroeder/jmeter,liwangbest/jmeter,max3163/jmeter,ubikfsabbe/jmeter,hizhangqi/jmeter-1,hemikak/jmeter,DoctorQ/jmeter,ubikloadpack/jmeter,ThiagoGarciaAlves/jmeter,tuanhq/jmeter,ThiagoGarciaAlves/jmeter,max3163/jmeter,hizhangqi/jmeter-1,vherilier/jmeter,DoctorQ/jmeter | package org.apache.jmeter.modifiers;
import java.io.Serializable;
import org.apache.jmeter.config.ConfigTestElement;
import org.apache.jmeter.testelement.ThreadListener;
import org.apache.jmeter.testelement.VariablesCollection;
import org.apache.jmeter.threads.JMeterVariables;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.logging.LoggingManager;
import org.apache.log.Logger;
/**
* @author Administrator
*
* To change this generated comment edit the template variable "typecomment":
* Window>Preferences>Java>Templates.
*/
public class CounterConfig
extends ConfigTestElement
implements Serializable, ThreadListener
{
private static Logger log = LoggingManager.getLoggerFor(JMeterUtils.ELEMENTS);
private final static String START = "CounterConfig.start";
private final static String END = "CounterConfig.end";
private final static String INCREMENT = "CounterConfig.incr";
private final static String PER_USER = "CounterConfig.per_user";
private final static String VAR_NAME = "CounterConfig.name";
private boolean perUser = false;
private int globalCounter = -1;
private int increment = 1;
private int start = 0;
private int end = Integer.MAX_VALUE;
private VariablesCollection vars = new VariablesCollection();
private int currentIterationCount = -1;
/**
* @see org.apache.jmeter.testelement.ThreadListener#iterationStarted(int)
*/
public synchronized void iterationStarted(int iterationCount)
{
JMeterVariables variables = vars.getVariables();
if(!perUser)
{
globalCounter++;
int value = start + (increment * globalCounter);
if(value > end)
{
globalCounter = 0;
value = start;
}
variables.put(getVarName(),Integer.toString(value));
}
else
{
String value = variables.get(getVarName());
if(value == null)
{
variables.put(getVarName(),Integer.toString(start));
}
else
{
try
{
int current = Integer.parseInt(value);
current += increment;
if(current > end)
{
current = start;
}
variables.put(getVarName(),Integer.toString(current));
}
catch(NumberFormatException e)
{
log.info("Bad number in Counter config",e);
}
}
}
}
/**
* @see org.apache.jmeter.testelement.ThreadListener#setJMeterVariables(JMeterVariables)
*/
public void setJMeterVariables(JMeterVariables jmVars)
{
vars.addJMeterVariables(jmVars);
start = getStart();
end = getEnd();
increment = getIncrement();
perUser = isPerUser();
}
public void setStart(int start)
{
setProperty(START,new Integer(start));
}
public void setStart(String start)
{
setProperty(START,start);
}
public int getStart()
{
return getPropertyAsInt(START);
}
public void setEnd(int end)
{
setProperty(END,new Integer(end));
}
public void setEnd(String end)
{
setProperty(END,end);
}
public int getEnd()
{
return getPropertyAsInt(END);
}
public void setIncrement(int inc)
{
setProperty(INCREMENT,new Integer(inc));
}
public void setIncrement(String incr)
{
setProperty(INCREMENT,incr);
}
public int getIncrement()
{
return getPropertyAsInt(INCREMENT);
}
public void setIsPerUser(boolean isPer)
{
setProperty(PER_USER,new Boolean(isPer));
}
public boolean isPerUser()
{
return getPropertyAsBoolean(PER_USER);
}
public void setVarName(String name)
{
setProperty(VAR_NAME,name);
}
public String getVarName()
{
return getPropertyAsString(VAR_NAME);
}
}
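/*
 * Illustrative worked example, not part of the original file: with
 * start=1, increment=2, end=5 and per_user=false, iterationStarted()
 * produces 1, 3, 5 and then wraps, because the global counter is reset
 * once start + increment * globalCounter exceeds end:
 *
 *     iteration 1: globalCounter=0, value = 1 + 2*0 = 1
 *     iteration 2: globalCounter=1, value = 1 + 2*1 = 3
 *     iteration 3: globalCounter=2, value = 1 + 2*2 = 5
 *     iteration 4: 1 + 2*3 = 7 exceeds end, so value resets to 1
 *
 * The per-user branch applies the same wrap rule, but each thread advances
 * the copy kept in its own JMeterVariables.
 */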
| src/components/org/apache/jmeter/modifiers/CounterConfig.java | package org.apache.jmeter.modifiers;
import java.io.Serializable;
import org.apache.jmeter.config.ConfigTestElement;
import org.apache.jmeter.testelement.ThreadListener;
import org.apache.jmeter.testelement.VariablesCollection;
import org.apache.jmeter.threads.JMeterVariables;
/**
* @author Administrator
*
* To change this generated comment edit the template variable "typecomment":
* Window>Preferences>Java>Templates.
*/
public class CounterConfig
extends ConfigTestElement
implements Serializable, ThreadListener
{
private final static String START = "CounterConfig.start";
private final static String END = "CounterConfig.end";
private final static String INCREMENT = "CounterConfig.incr";
private final static String PER_USER = "CounterConfig.per_user";
private final static String VAR_NAME = "CounterConfig.name";
private boolean perUser = false;
private int globalCounter = -1;
private int increment = 1;
private int start = 0;
private int end = Integer.MAX_VALUE;
private VariablesCollection vars = new VariablesCollection();
/**
* @see org.apache.jmeter.testelement.ThreadListener#iterationStarted(int)
*/
public synchronized void iterationStarted(int iterationCount)
{
JMeterVariables variables = vars.getVariables();
if(perUser)
{
int value = start + (increment * (iterationCount-1));
value = value % end;
variables.put(getVarName(),Integer.toString(value));
}
else
{
globalCounter++;
int value = start + (increment * globalCounter);
value = value % end;
variables.put(getVarName(),Integer.toString(value));
}
}
/**
* @see org.apache.jmeter.testelement.ThreadListener#setJMeterVariables(JMeterVariables)
*/
public void setJMeterVariables(JMeterVariables jmVars)
{
vars.addJMeterVariables(jmVars);
start = getStart();
end = getEnd();
increment = getIncrement();
perUser = isPerUser();
}
public void setStart(int start)
{
setProperty(START,new Integer(start));
}
public void setStart(String start)
{
setProperty(START,start);
}
public int getStart()
{
return getPropertyAsInt(START);
}
public void setEnd(int end)
{
setProperty(END,new Integer(end));
}
public void setEnd(String end)
{
setProperty(END,end);
}
public int getEnd()
{
return getPropertyAsInt(END);
}
public void setIncrement(int inc)
{
setProperty(INCREMENT,new Integer(inc));
}
public void setIncrement(String incr)
{
setProperty(INCREMENT,incr);
}
public int getIncrement()
{
return getPropertyAsInt(INCREMENT);
}
public void setIsPerUser(boolean isPer)
{
setProperty(PER_USER,new Boolean(isPer));
}
public boolean isPerUser()
{
return getPropertyAsBoolean(PER_USER);
}
public void setVarName(String name)
{
setProperty(VAR_NAME,name);
}
public String getVarName()
{
return getPropertyAsString(VAR_NAME);
}
}
| Fixing counter config bug
git-svn-id: 7c053b8fbd1fb5868f764c6f9536fc6a9bbe7da9@322917 13f79535-47bb-0310-9956-ffa450edef68
| src/components/org/apache/jmeter/modifiers/CounterConfig.java | Fixing counter config bug | <ide><path>rc/components/org/apache/jmeter/modifiers/CounterConfig.java
<ide> import org.apache.jmeter.testelement.ThreadListener;
<ide> import org.apache.jmeter.testelement.VariablesCollection;
<ide> import org.apache.jmeter.threads.JMeterVariables;
<add>import org.apache.jmeter.util.JMeterUtils;
<add>import org.apache.jorphan.logging.LoggingManager;
<add>import org.apache.log.Logger;
<ide> /**
<ide> * @author Administrator
<ide> *
<ide> extends ConfigTestElement
<ide> implements Serializable, ThreadListener
<ide> {
<add> private static Logger log = LoggingManager.getLoggerFor(JMeterUtils.ELEMENTS);
<ide> private final static String START = "CounterConfig.start";
<ide> private final static String END = "CounterConfig.end";
<ide> private final static String INCREMENT = "CounterConfig.incr";
<ide> private int start = 0;
<ide> private int end = Integer.MAX_VALUE;
<ide> private VariablesCollection vars = new VariablesCollection();
<add> private int currentIterationCount = -1;
<ide> /**
<ide> * @see org.apache.jmeter.testelement.ThreadListener#iterationStarted(int)
<ide> */
<ide> public synchronized void iterationStarted(int iterationCount)
<ide> {
<ide> JMeterVariables variables = vars.getVariables();
<del> if(perUser)
<add> if(!perUser)
<ide> {
<del> int value = start + (increment * (iterationCount-1));
<del> value = value % end;
<add> globalCounter++;
<add> int value = start + (increment * globalCounter);
<add> if(value > end)
<add> {
<add> globalCounter = 0;
<add> value = start;
<add> }
<ide> variables.put(getVarName(),Integer.toString(value));
<ide> }
<ide> else
<del> {
<del> globalCounter++;
<del> int value = start + (increment * globalCounter);
<del> value = value % end;
<del> variables.put(getVarName(),Integer.toString(value));
<del> }
<add> {
<add> String value = variables.get(getVarName());
<add> if(value == null)
<add> {
<add> variables.put(getVarName(),Integer.toString(start));
<add> }
<add> else
<add> {
<add> try
<add> {
<add> int current = Integer.parseInt(value);
<add> current += increment;
<add> if(current > end)
<add> {
<add> current = start;
<add> }
<add> variables.put(getVarName(),Integer.toString(current));
<add> }
<add> catch(NumberFormatException e)
<add> {
<add> log.info("Bad number in Counter config",e);
<add> }
<add> }
<add> }
<ide> }
<ide>
<ide> /** |
|
Java | apache-2.0 | error: pathspec 'support/jboss/src/org/jsecurity/ri/jboss/aop/JBossAopAuthorizationInterceptor.java' did not match any file(s) known to git
| 38f2683992afa21c2f593886ea9db5cab6da93c5 | 1 | StempG/shiro,haxwell/apache-shiro-1.2.3,yaotj/shiro,Collaborne/shiro,chapmajs/shiro,borigue/shiro,lucamilanesio/shiro,IBYoung/shiro,apache/jsecurity,taoguan/shiro,ivansun1010/shiro,relateiq/shiro,universsky/shiro,Kevin2030/shiro | package org.jsecurity.ri.jboss.aop;
import org.jboss.aop.advice.Interceptor;
import org.jboss.aop.joinpoint.Invocation;
import org.jboss.aop.joinpoint.MethodInvocation;
import org.jsecurity.authz.AuthorizedAction;
import org.jsecurity.authz.Authorizer;
import org.jsecurity.authz.aop.AbstractAuthorizationInterceptor;
import java.lang.reflect.Method;
/**
* @since 0.2
* @author Les Hazlewood
*/
public class JBossAopAuthorizationInterceptor
extends AbstractAuthorizationInterceptor implements Interceptor {
private static final String NAME = "JSecurity JBossAopAuthorizationInterceptor";
public JBossAopAuthorizationInterceptor(){}
public JBossAopAuthorizationInterceptor( Authorizer authorizer ) {
setAuthorizer( authorizer );
}
public String getName() {
return NAME;
}
protected AuthorizedAction createAuthzAction( Object jbossAopInvocation ) {
final MethodInvocation mi = (MethodInvocation)jbossAopInvocation;
org.jsecurity.authz.method.MethodInvocation jsecurityMI =
new org.jsecurity.authz.method.MethodInvocation() {
public Method getMethod() {
return mi.getMethod();
}
public Object[] getArguments() {
return mi.getArguments();
}
public String toString() {
return "Method invocation [" + mi.getMethod() + "]";
}
};
return jsecurityMI;
}
protected Object continueInvocation( Object jbossAopInvocation ) throws Throwable {
Invocation invocation = (Invocation)jbossAopInvocation;
return invocation.invokeNext();
}
public Object invoke( final Invocation invocation ) throws Throwable {
return super.invoke( invocation );
}
}
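/*
 * Illustrative sketch, not taken from this project: deployment normally
 * happens through JBoss AOP binding configuration, but the object wiring
 * itself is simply:
 *
 *     Authorizer authorizer = ...; // supplied by the application's JSecurity setup
 *     JBossAopAuthorizationInterceptor interceptor =
 *             new JBossAopAuthorizationInterceptor(authorizer);
 *
 * Each intercepted call then reaches invoke(Invocation), which wraps the
 * JBoss MethodInvocation as a JSecurity MethodInvocation for the
 * authorization check before the chain continues via invokeNext().
 */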
| support/jboss/src/org/jsecurity/ri/jboss/aop/JBossAopAuthorizationInterceptor.java | minor build changes to build spring and jboss support
git-svn-id: ec6ef1d57ec0831ce4cbff3b75527511e63bfbe3@710453 13f79535-47bb-0310-9956-ffa450edef68
| support/jboss/src/org/jsecurity/ri/jboss/aop/JBossAopAuthorizationInterceptor.java | minor build changes to build spring and jboss support | <ide><path>upport/jboss/src/org/jsecurity/ri/jboss/aop/JBossAopAuthorizationInterceptor.java
<add>package org.jsecurity.ri.jboss.aop;
<add>
<add>import org.jboss.aop.advice.Interceptor;
<add>import org.jboss.aop.joinpoint.Invocation;
<add>import org.jboss.aop.joinpoint.MethodInvocation;
<add>import org.jsecurity.authz.AuthorizedAction;
<add>import org.jsecurity.authz.Authorizer;
<add>import org.jsecurity.authz.aop.AbstractAuthorizationInterceptor;
<add>
<add>import java.lang.reflect.Method;
<add>
<add>/**
<add> * @since 0.2
<add> * @author Les Hazlewood
<add> */
<add>public class JBossAopAuthorizationInterceptor
<add> extends AbstractAuthorizationInterceptor implements Interceptor {
<add>
<add> private static final String NAME = "JSecurity JBossAopAuthorizationInterceptor";
<add>
<add> public JBossAopAuthorizationInterceptor(){}
<add>
<add> public JBossAopAuthorizationInterceptor( Authorizer authorizer ) {
<add> setAuthorizer( authorizer );
<add> }
<add>
<add> public String getName() {
<add> return NAME;
<add> }
<add>
<add> protected AuthorizedAction createAuthzAction( Object jbossAopInvocation ) {
<add> final MethodInvocation mi = (MethodInvocation)jbossAopInvocation;
<add>
<add> org.jsecurity.authz.method.MethodInvocation jsecurityMI =
<add> new org.jsecurity.authz.method.MethodInvocation() {
<add> public Method getMethod() {
<add> return mi.getMethod();
<add> }
<add>
<add> public Object[] getArguments() {
<add> return mi.getArguments();
<add> }
<add>
<add> public String toString() {
<add> return "Method invocation [" + mi.getMethod() + "]";
<add> }
<add> };
<add>
<add> return jsecurityMI;
<add> }
<add>
<add> protected Object continueInvocation( Object jbossAopInvocation ) throws Throwable {
<add> Invocation invocation = (Invocation)jbossAopInvocation;
<add> return invocation.invokeNext();
<add> }
<add>
<add> public Object invoke( final Invocation invocation ) throws Throwable {
<add> return super.invoke( invocation );
<add> }
<add>
<add>} |
|
Java | mit | 68fb5104491b80143500ab9cf911a9036b955dbb | 0 | shitikanth/jabref,tschechlovdev/jabref,tschechlovdev/jabref,zellerdev/jabref,Siedlerchr/jabref,obraliar/jabref,oscargus/jabref,Mr-DLib/jabref,mairdl/jabref,motokito/jabref,shitikanth/jabref,zellerdev/jabref,mredaelli/jabref,bartsch-dev/jabref,oscargus/jabref,tobiasdiez/jabref,bartsch-dev/jabref,ayanai1/jabref,grimes2/jabref,ayanai1/jabref,Braunch/jabref,Mr-DLib/jabref,Braunch/jabref,mairdl/jabref,Mr-DLib/jabref,sauliusg/jabref,mredaelli/jabref,Siedlerchr/jabref,bartsch-dev/jabref,motokito/jabref,zellerdev/jabref,obraliar/jabref,sauliusg/jabref,tobiasdiez/jabref,ayanai1/jabref,tschechlovdev/jabref,grimes2/jabref,mredaelli/jabref,mredaelli/jabref,JabRef/jabref,oscargus/jabref,Mr-DLib/jabref,jhshinn/jabref,sauliusg/jabref,bartsch-dev/jabref,ayanai1/jabref,jhshinn/jabref,Braunch/jabref,oscargus/jabref,mairdl/jabref,motokito/jabref,tschechlovdev/jabref,shitikanth/jabref,tobiasdiez/jabref,Mr-DLib/jabref,obraliar/jabref,grimes2/jabref,Siedlerchr/jabref,mairdl/jabref,mairdl/jabref,jhshinn/jabref,oscargus/jabref,bartsch-dev/jabref,ayanai1/jabref,tschechlovdev/jabref,jhshinn/jabref,sauliusg/jabref,grimes2/jabref,grimes2/jabref,JabRef/jabref,JabRef/jabref,tobiasdiez/jabref,Siedlerchr/jabref,obraliar/jabref,Braunch/jabref,shitikanth/jabref,shitikanth/jabref,obraliar/jabref,zellerdev/jabref,JabRef/jabref,motokito/jabref,jhshinn/jabref,mredaelli/jabref,Braunch/jabref,zellerdev/jabref,motokito/jabref | /* Copyright (C) 2003-2011 JabRef contributors.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package net.sf.jabref.export;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.Writer;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.TreeMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import net.sf.jabref.BibtexDatabase;
import net.sf.jabref.BibtexEntry;
import net.sf.jabref.BibtexEntryType;
import net.sf.jabref.BibtexFields;
import net.sf.jabref.BibtexString;
import net.sf.jabref.BibtexStringComparator;
import net.sf.jabref.CrossRefEntryComparator;
import net.sf.jabref.CustomEntryType;
import net.sf.jabref.FieldComparator;
import net.sf.jabref.FieldComparatorStack;
import net.sf.jabref.GUIGlobals;
import net.sf.jabref.Globals;
import net.sf.jabref.IdComparator;
import net.sf.jabref.JabRefPreferences;
import net.sf.jabref.MetaData;
import ca.odell.glazedlists.BasicEventList;
import ca.odell.glazedlists.SortedList;
public class FileActions {
private static Pattern refPat = Pattern.compile("(#[A-Za-z]+#)"); // Used to detect string references in strings
private static BibtexString.Type previousStringType;
private static void writePreamble(Writer fw, String preamble) throws IOException {
if (preamble != null) {
fw.write("@PREAMBLE{");
fw.write(preamble);
fw.write("}" + Globals.NEWLINE + Globals.NEWLINE);
}
}
/**
* Write all strings in alphabetical order, modified to produce a safe (for
* BibTeX) order of the strings if they reference each other.
*
* @param fw The Writer to send the output to.
* @param database The database whose strings we should write.
* @throws IOException If anything goes wrong in writing.
*/
private static void writeStrings(Writer fw, BibtexDatabase database) throws IOException {
previousStringType = BibtexString.Type.AUTHOR;
List<BibtexString> strings = new ArrayList<BibtexString>();
for (String s : database.getStringKeySet()) {
strings.add(database.getString(s));
}
Collections.sort(strings, new BibtexStringComparator(true));
// First, make a Map of all entries:
HashMap<String, BibtexString> remaining = new HashMap<String, BibtexString>();
int maxKeyLength = 0;
for (BibtexString string : strings) {
remaining.put(string.getName(), string);
maxKeyLength = Math.max(maxKeyLength, string.getName().length());
}
for (BibtexString.Type t : BibtexString.Type.values()) {
for (BibtexString bs : strings) {
if (remaining.containsKey(bs.getName()) && bs.getType() == t) {
writeString(fw, bs, remaining, maxKeyLength);
}
}
}
fw.write(Globals.NEWLINE);
}
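/*
 * Illustrative example, not part of the original file: the reference
 * chasing below in writeString() guarantees a BibTeX-safe ordering. If
 * string "aaa" refers to "bbb" (stored internally as "#bbb#"), then "bbb"
 * is written first even though plain alphabetical order would emit "aaa"
 * first. The names and contents are invented:
 *
 *     @String { bbb = "Journal of Examples" }
 *     @String { aaa = bbb # ", Special Issue" }
 */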
private static void writeString(Writer fw, BibtexString bs, HashMap<String, BibtexString> remaining, int maxKeyLength) throws IOException {
// First remove this from the "remaining" list so it can't cause problem with circular refs:
remaining.remove(bs.getName());
// Then we go through the string looking for references to other strings. If we find references
// to strings that we will write, but still haven't, we write those before proceeding. This ensures
// that the string order will be acceptable for BibTeX.
String content = bs.getContent();
Matcher m;
while ((m = refPat.matcher(content)).find()) {
String foundLabel = m.group(1);
int restIndex = content.indexOf(foundLabel) + foundLabel.length();
content = content.substring(restIndex);
Object referred = remaining.get(foundLabel.substring(1, foundLabel.length() - 1));
// If the label we found exists as a key in the "remaining" Map, we go on and write it now:
if (referred != null) {
writeString(fw, (BibtexString) referred, remaining, maxKeyLength);
}
}
if (previousStringType != bs.getType()) {
fw.write(Globals.NEWLINE);
previousStringType = bs.getType();
}
String suffix = "";
for (int i = maxKeyLength - bs.getName().length(); i > 0; i--) {
suffix += " ";
}
fw.write("@String { " + bs.getName() + suffix + " = ");
if (!bs.getContent().equals("")) {
try {
String formatted = (new LatexFieldFormatter()).format(bs.getContent(), Globals.BIBTEX_STRING);
fw.write(formatted);
} catch (IllegalArgumentException ex) {
throw new IllegalArgumentException(
Globals.lang("The # character is not allowed in BibTeX strings unless escaped as in '\\#'.") + "\n"
+ Globals.lang("Before saving, please edit any strings containing the # character."));
}
} else {
fw.write("{}");
}
fw.write(" }" + Globals.NEWLINE);// + Globals.NEWLINE);
}
/**
* Writes the JabRef signature and the encoding.
*
* @param encoding String the name of the encoding, which is part of the
* header.
*/
private static void writeBibFileHeader(Writer out, String encoding) throws IOException {
out.write("% ");
out.write(GUIGlobals.SIGNATURE);
out.write(" " + GUIGlobals.version + "." + Globals.NEWLINE + "% "
+ GUIGlobals.encPrefix + encoding + Globals.NEWLINE + Globals.NEWLINE);
}
/**
* Saves the database to file. Two boolean values indicate whether only
* entries with a nonzero Globals.SEARCH value and only entries with a
* nonzero Globals.GROUPSEARCH value should be saved. This can be used to
* let the user save only the results of a search. Passing false for both
* flags means all entries are saved.
*/
public static SaveSession saveDatabase(BibtexDatabase database,
MetaData metaData, File file, JabRefPreferences prefs,
boolean checkSearch, boolean checkGroup, String encoding, boolean suppressBackup)
throws SaveException {
TreeMap<String, BibtexEntryType> types = new TreeMap<String, BibtexEntryType>();
boolean backup = prefs.getBoolean("backup");
if (suppressBackup) {
backup = false;
}
SaveSession session;
BibtexEntry exceptionCause = null;
try {
session = new SaveSession(file, encoding, backup);
} catch (Throwable e) {
if (encoding != null) {
System.err.println("Error from encoding: '" + encoding + "' Len: " + encoding.length());
}
// we must catch all exceptions to be able to notify users that
// saving failed, no matter what the reason was
// (and they won't just quit JabRef thinking
// everything worked and losing data)
e.printStackTrace();
throw new SaveException(e.getMessage());
}
try {
// Get our data stream. This stream writes only to a temporary file,
// until committed.
VerifyingWriter fw = session.getWriter();
// Write signature.
writeBibFileHeader(fw, encoding);
// Write preamble if there is one.
writePreamble(fw, database.getPreamble());
// Write strings if there are any.
writeStrings(fw, database);
// Write database entries. Take care, using CrossRefEntry-
// Comparator, that referred entries occur after referring
// ones. Apart from crossref requirements, entries will be
// sorted as they appear on the screen.
List<BibtexEntry> sorter = getSortedEntries(database, null, true);
FieldFormatter ff = new LatexFieldFormatter();
for (BibtexEntry be : sorter) {
exceptionCause = be;
// Check if we must write the type definition for this
// entry, as well. Our criterion is that all non-standard
// types (*not* customized standard types) must be written.
BibtexEntryType tp = be.getType();
if (BibtexEntryType.getStandardType(tp.getName()) == null) {
types.put(tp.getName(), tp);
}
// Check if the entry should be written.
boolean write = true;
if (checkSearch && !nonZeroField(be, BibtexFields.SEARCH)) {
write = false;
}
if (checkGroup && !nonZeroField(be, BibtexFields.GROUPSEARCH)) {
write = false;
}
if (write) {
be.write(fw, ff, true);
fw.write(Globals.NEWLINE);
}
}
// Write meta data.
if (metaData != null) {
metaData.writeMetaData(fw);
}
// Write type definitions, if any:
if (types.size() > 0) {
for (String s : types.keySet()) {
BibtexEntryType type = types.get(s);
if (type instanceof CustomEntryType) {
CustomEntryType tp = (CustomEntryType) type;
tp.save(fw);
fw.write(Globals.NEWLINE);
}
}
}
fw.close();
} catch (Throwable ex) {
ex.printStackTrace();
session.cancel();
// repairAfterError(file, backup, INIT_OK);
throw new SaveException(ex.getMessage(), exceptionCause);
}
return session;
}
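/*
 * Illustrative usage sketch, not part of the original file; the file name
 * is invented:
 *
 *     SaveSession session = FileActions.saveDatabase(database, metaData,
 *             new File("references.bib"), Globals.prefs,
 *             false, false,    // no search/group filtering: write every entry
 *             "UTF-8", false); // encoding; keep the backup preference in effect
 *     session.commit();        // assumes the commit step offered by SaveSession
 */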
private static class SaveSettings {
public final String pri, sec, ter;
public final boolean priD, secD, terD;
public SaveSettings(boolean isSaveOperation) {
/* three options:
* 1. original order (saveInOriginalOrder) -- not hit here as SaveSettings is not called in that case
* 2. current table sort order
* 3. ordered by specified order
*/
// This case should never be hit as SaveSettings() is never called if InOriginalOrder is true
assert !isSaveOperation || !Globals.prefs.getBoolean(JabRefPreferences.SAVE_IN_ORIGINAL_ORDER);
assert isSaveOperation || !Globals.prefs.getBoolean(JabRefPreferences.EXPORT_IN_ORIGINAL_ORDER);
if (isSaveOperation && Globals.prefs.getBoolean(JabRefPreferences.SAVE_IN_SPECIFIED_ORDER)) {
pri = Globals.prefs.get(JabRefPreferences.SAVE_PRIMARY_SORT_FIELD);
sec = Globals.prefs.get(JabRefPreferences.SAVE_SECONDARY_SORT_FIELD);
ter = Globals.prefs.get(JabRefPreferences.SAVE_TERTIARY_SORT_FIELD);
priD = Globals.prefs.getBoolean(JabRefPreferences.SAVE_PRIMARY_SORT_DESCENDING);
secD = Globals.prefs.getBoolean(JabRefPreferences.SAVE_SECONDARY_SORT_DESCENDING);
terD = Globals.prefs.getBoolean(JabRefPreferences.SAVE_TERTIARY_SORT_DESCENDING);
} else if (!isSaveOperation && Globals.prefs.getBoolean(JabRefPreferences.EXPORT_IN_SPECIFIED_ORDER)) {
pri = Globals.prefs.get(JabRefPreferences.EXPORT_PRIMARY_SORT_FIELD);
sec = Globals.prefs.get(JabRefPreferences.EXPORT_SECONDARY_SORT_FIELD);
ter = Globals.prefs.get(JabRefPreferences.EXPORT_TERTIARY_SORT_FIELD);
priD = Globals.prefs.getBoolean(JabRefPreferences.EXPORT_PRIMARY_SORT_DESCENDING);
secD = Globals.prefs.getBoolean(JabRefPreferences.EXPORT_SECONDARY_SORT_DESCENDING);
terD = Globals.prefs.getBoolean(JabRefPreferences.EXPORT_TERTIARY_SORT_DESCENDING);
} else {
// The setting is to save according to the current table order.
pri = Globals.prefs.get(JabRefPreferences.PRIMARY_SORT_FIELD);
sec = Globals.prefs.get(JabRefPreferences.SECONDARY_SORT_FIELD);
ter = Globals.prefs.get(JabRefPreferences.TERTIARY_SORT_FIELD);
priD = Globals.prefs.getBoolean(JabRefPreferences.PRIMARY_SORT_DESCENDING);
secD = Globals.prefs.getBoolean(JabRefPreferences.SECONDARY_SORT_DESCENDING);
terD = Globals.prefs.getBoolean(JabRefPreferences.TERTIARY_SORT_DESCENDING);
}
}
}
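/*
 * Illustrative example, not part of the original file: with
 * SAVE_IN_SPECIFIED_ORDER enabled and the specified fields set to, say,
 * author / year / title, a save operation sorts by author, then year,
 * then title (each optionally descending). Otherwise the current table
 * sort order applies. The concrete field names are invented.
 */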
private static List<Comparator<BibtexEntry>> getSaveComparators(boolean isSaveOperation) {
SaveSettings saveSettings = new SaveSettings(isSaveOperation);
List<Comparator<BibtexEntry>> comparators = new ArrayList<Comparator<BibtexEntry>>();
if (isSaveOperation) {
comparators.add(new CrossRefEntryComparator());
}
comparators.add(new FieldComparator(saveSettings.pri, saveSettings.priD));
comparators.add(new FieldComparator(saveSettings.sec, saveSettings.secD));
comparators.add(new FieldComparator(saveSettings.ter, saveSettings.terD));
comparators.add(new FieldComparator(BibtexFields.KEY_FIELD));
return comparators;
}
/**
* Saves the database to file, including only the entries included in the
* supplied input array bes.
*
* @return A List containing warnings, if any.
*/
public static SaveSession savePartOfDatabase(BibtexDatabase database, MetaData metaData,
File file, JabRefPreferences prefs, BibtexEntry[] bes, String encoding) throws SaveException {
TreeMap<String, BibtexEntryType> types = new TreeMap<String, BibtexEntryType>(); // Map
// to
// collect
// entry
// type
// definitions
// that we must save along with entries using them.
BibtexEntry be = null;
boolean backup = prefs.getBoolean("backup");
SaveSession session;
try {
session = new SaveSession(file, encoding, backup);
} catch (IOException e) {
throw new SaveException(e.getMessage());
}
try {
// Define our data stream.
VerifyingWriter fw = session.getWriter();
// Write signature.
writeBibFileHeader(fw, encoding);
// Write preamble if there is one.
writePreamble(fw, database.getPreamble());
// Write strings if there are any.
writeStrings(fw, database);
// Write database entries. Take care, using CrossRefEntry-
// Comparator, that referred entries occur after referring
// ones. Apart from crossref requirements, entries will be
// sorted as they appear on the screen.
List<Comparator<BibtexEntry>> comparators = getSaveComparators(true);
// Use glazed lists to get a sorted view of the entries:
BasicEventList<BibtexEntry> entryList = new BasicEventList<BibtexEntry>();
SortedList<BibtexEntry> sorter = new SortedList<BibtexEntry>(entryList, new FieldComparatorStack<BibtexEntry>(comparators));
if ((bes != null) && (bes.length > 0)) {
Collections.addAll(sorter, bes);
}
FieldFormatter ff = new LatexFieldFormatter();
for (BibtexEntry aSorter : sorter) {
be = (aSorter);
// Check if we must write the type definition for this
// entry, as well. Our criterion is that all non-standard
// types (*not* customized standard types) must be written.
BibtexEntryType tp = be.getType();
if (BibtexEntryType.getStandardType(tp.getName()) == null) {
types.put(tp.getName(), tp);
}
be.write(fw, ff, true);
fw.write(Globals.NEWLINE);
}
// Write meta data.
if (metaData != null) {
metaData.writeMetaData(fw);
}
// Write type definitions, if any:
if (types.size() > 0) {
for (String s : types.keySet()) {
CustomEntryType tp = (CustomEntryType) types.get(s);
tp.save(fw);
fw.write(Globals.NEWLINE);
}
}
fw.close();
} catch (Throwable ex) {
session.cancel();
//repairAfterError(file, backup, status);
throw new SaveException(ex.getMessage(), be);
}
return session;
}
/**
* This method attempts to get a Reader for the file path given, either by
* loading it as a resource (from within jar), or as a normal file. If
* unsuccessful (e.g. file not found), an IOException is thrown.
*/
public static Reader getReader(String name) throws IOException {
Reader reader = null;
// Try loading as a resource first. This works for files inside the jar:
URL reso = Globals.class.getResource(name);
// If that didn't work, try loading as a normal file URL:
if (reso != null) {
try {
reader = new InputStreamReader(reso.openStream());
} catch (FileNotFoundException ex) {
throw new IOException(Globals.lang("Could not find layout file") + ": '" + name + "'.");
}
} else {
File f = new File(name);
try {
reader = new FileReader(f);
} catch (FileNotFoundException ex) {
throw new IOException(Globals.lang("Could not find layout file") + ": '" + name + "'.");
}
}
return reader;
}
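/*
 * Illustrative usage note, not part of the original file: both calls below
 * go through the lookup above, the first resolving via the classpath and
 * the second as a plain file. The paths are invented:
 *
 *     Reader fromJar  = FileActions.getReader("/resource/layout/simple.layout");
 *     Reader fromDisk = FileActions.getReader("/home/user/custom.layout");
 */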
/*
* We have begun to use getSortedEntries() for both database save operations
* and non-database save operations. In a non-database save operation
* (such as the exportDatabase call), we do not wish to use the
* global preference of saving in standard order.
*/
@SuppressWarnings("unchecked")
public static List<BibtexEntry> getSortedEntries(BibtexDatabase database, Set<String> keySet, boolean isSaveOperation) {
boolean inOriginalOrder = isSaveOperation ? Globals.prefs.getBoolean("saveInOriginalOrder")
: Globals.prefs.getBoolean("exportInOriginalOrder");
List<Comparator<BibtexEntry>> comparators;
if (inOriginalOrder) {
// Sort entries based on their creation order, utilizing the fact
// that IDs used for entries are increasing, sortable numbers.
comparators = new ArrayList<Comparator<BibtexEntry>>();
comparators.add(new CrossRefEntryComparator());
comparators.add(new IdComparator());
} else {
comparators = getSaveComparators(isSaveOperation);
}
// Use glazed lists to get a sorted view of the entries:
FieldComparatorStack<BibtexEntry> comparatorStack = new FieldComparatorStack<BibtexEntry>(comparators);
BasicEventList<BibtexEntry> entryList = new BasicEventList<BibtexEntry>();
SortedList<BibtexEntry> sorter = new SortedList<BibtexEntry>(entryList, comparatorStack);
if (keySet == null) {
keySet = database.getKeySet();
}
if (keySet != null) {
Iterator<String> i = keySet.iterator();
for (; i.hasNext();) {
sorter.add(database.getEntryById((i.next())));
}
}
return sorter;
}
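/*
 * Illustrative usage sketch, not part of the original file: an export that
 * wants the whole database in export ordering would call
 *
 *     List<BibtexEntry> entries = FileActions.getSortedEntries(database, null, false);
 *
 * A null keySet means "all entries"; isSaveOperation=false selects the
 * export ordering preferences rather than the save ones.
 */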
/**
* @return true iff the entry has a nonzero value in its field.
*/
private static boolean nonZeroField(BibtexEntry be, String field) {
String o = (be.getField(field));
return ((o != null) && !o.equals("0"));
}
}
///////////////////////////////////////////////////////////////////////////////
// END OF FILE.
///////////////////////////////////////////////////////////////////////////////
| src/main/java/net/sf/jabref/export/FileActions.java | /* Copyright (C) 2003-2011 JabRef contributors.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package net.sf.jabref.export;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.Writer;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.TreeMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import net.sf.jabref.BibtexDatabase;
import net.sf.jabref.BibtexEntry;
import net.sf.jabref.BibtexEntryType;
import net.sf.jabref.BibtexFields;
import net.sf.jabref.BibtexString;
import net.sf.jabref.BibtexStringComparator;
import net.sf.jabref.CrossRefEntryComparator;
import net.sf.jabref.CustomEntryType;
import net.sf.jabref.FieldComparator;
import net.sf.jabref.FieldComparatorStack;
import net.sf.jabref.GUIGlobals;
import net.sf.jabref.Globals;
import net.sf.jabref.IdComparator;
import net.sf.jabref.JabRefPreferences;
import net.sf.jabref.MetaData;
import ca.odell.glazedlists.BasicEventList;
import ca.odell.glazedlists.SortedList;
public class FileActions {
private static Pattern refPat = Pattern.compile("(#[A-Za-z]+#)"); // Used to detect string references in strings
private static BibtexString.Type previousStringType;
private static void writePreamble(Writer fw, String preamble) throws IOException {
if (preamble != null) {
fw.write("@PREAMBLE{");
fw.write(preamble);
fw.write("}" + Globals.NEWLINE + Globals.NEWLINE);
}
}
/**
* Write all strings in alphabetical order, modified to produce a safe (for
* BibTeX) order of the strings if they reference each other.
*
* @param fw The Writer to send the output to.
* @param database The database whose strings we should write.
* @throws IOException If anything goes wrong in writing.
*/
private static void writeStrings(Writer fw, BibtexDatabase database) throws IOException {
previousStringType = BibtexString.Type.AUTHOR;
List<BibtexString> strings = new ArrayList<BibtexString>();
for (String s : database.getStringKeySet()) {
strings.add(database.getString(s));
}
Collections.sort(strings, new BibtexStringComparator(true));
// First, make a Map of all entries:
HashMap<String, BibtexString> remaining = new HashMap<String, BibtexString>();
int maxKeyLength = 0;
for (BibtexString string : strings) {
remaining.put(string.getName(), string);
maxKeyLength = Math.max(maxKeyLength, string.getName().length());
}
for (BibtexString.Type t : BibtexString.Type.values()) {
for (BibtexString bs : strings) {
if (remaining.containsKey(bs.getName()) && bs.getType() == t) {
writeString(fw, bs, remaining, maxKeyLength);
}
}
}
fw.write(Globals.NEWLINE);
}
private static void writeString(Writer fw, BibtexString bs, HashMap<String, BibtexString> remaining, int maxKeyLength) throws IOException {
// First remove this from the "remaining" list so it can't cause problem with circular refs:
remaining.remove(bs.getName());
// Then we go through the string looking for references to other strings. If we find references
// to strings that we will write, but still haven't, we write those before proceeding. This ensures
// that the string order will be acceptable for BibTeX.
String content = bs.getContent();
Matcher m;
while ((m = refPat.matcher(content)).find()) {
String foundLabel = m.group(1);
int restIndex = content.indexOf(foundLabel) + foundLabel.length();
content = content.substring(restIndex);
Object referred = remaining.get(foundLabel.substring(1, foundLabel.length() - 1));
// If the label we found exists as a key in the "remaining" Map, we go on and write it now:
if (referred != null) {
writeString(fw, (BibtexString) referred, remaining, maxKeyLength);
}
}
if (previousStringType != bs.getType()) {
fw.write(Globals.NEWLINE);
previousStringType = bs.getType();
}
String suffix = "";
for (int i = maxKeyLength - bs.getName().length(); i > 0; i--) {
suffix += " ";
}
fw.write("@String { " + bs.getName() + suffix + " = ");
if (!bs.getContent().equals("")) {
try {
String formatted = (new LatexFieldFormatter()).format(bs.getContent(), Globals.BIBTEX_STRING);
fw.write(formatted);
} catch (IllegalArgumentException ex) {
throw new IllegalArgumentException(
Globals.lang("The # character is not allowed in BibTeX strings unless escaped as in '\\#'.") + "\n"
+ Globals.lang("Before saving, please edit any strings containing the # character."));
}
} else {
fw.write("{}");
}
fw.write(" }" + Globals.NEWLINE);// + Globals.NEWLINE);
}
/**
* Writes the JabRef signature and the encoding.
*
* @param encoding String the name of the encoding, which is part of the
* header.
*/
private static void writeBibFileHeader(Writer out, String encoding) throws IOException {
out.write("% ");
out.write(GUIGlobals.SIGNATURE);
out.write(" " + GUIGlobals.version + "." + Globals.NEWLINE + "% "
+ GUIGlobals.encPrefix + encoding + Globals.NEWLINE + Globals.NEWLINE);
}
/**
* Saves the database to file. Two boolean values indicate whether only
* entries with a nonzero Globals.SEARCH value and only entries with a
* nonzero Globals.GROUPSEARCH value should be saved. This can be used to
* let the user save only the results of a search. Passing false for both
* flags means all entries are saved.
*/
public static SaveSession saveDatabase(BibtexDatabase database,
MetaData metaData, File file, JabRefPreferences prefs,
boolean checkSearch, boolean checkGroup, String encoding, boolean suppressBackup)
throws SaveException {
TreeMap<String, BibtexEntryType> types = new TreeMap<String, BibtexEntryType>();
boolean backup = prefs.getBoolean("backup");
if (suppressBackup) {
backup = false;
}
SaveSession session;
BibtexEntry exceptionCause = null;
try {
session = new SaveSession(file, encoding, backup);
} catch (Throwable e) {
if (encoding != null) {
System.err.println("Error from encoding: '" + encoding + "' Len: " + encoding.length());
}
// we must catch all exceptions to be able notify users that
// saving failed, no matter what the reason was
// (and they won't just quit JabRef thinking
// everyting worked and loosing data)
e.printStackTrace();
throw new SaveException(e.getMessage());
}
try {
// Get our data stream. This stream writes only to a temporary file,
// until committed.
VerifyingWriter fw = session.getWriter();
// Write signature.
writeBibFileHeader(fw, encoding);
// Write preamble if there is one.
writePreamble(fw, database.getPreamble());
// Write strings if there are any.
writeStrings(fw, database);
// Write database entries. Take care, using CrossRefEntry-
// Comparator, that referred entries occur after referring
// ones. Apart from crossref requirements, entries will be
// sorted as they appear on the screen.
List<BibtexEntry> sorter = getSortedEntries(database, null, true);
FieldFormatter ff = new LatexFieldFormatter();
for (BibtexEntry be : sorter) {
exceptionCause = be;
// Check if we must write the type definition for this
// entry, as well. Our criterion is that all non-standard
// types (*not* customized standard types) must be written.
BibtexEntryType tp = be.getType();
if (BibtexEntryType.getStandardType(tp.getName()) == null) {
types.put(tp.getName(), tp);
}
// Check if the entry should be written.
boolean write = true;
if (checkSearch && !nonZeroField(be, BibtexFields.SEARCH)) {
write = false;
}
if (checkGroup && !nonZeroField(be, BibtexFields.GROUPSEARCH)) {
write = false;
}
if (write) {
be.write(fw, ff, true);
fw.write(Globals.NEWLINE);
}
}
// Write meta data.
if (metaData != null) {
metaData.writeMetaData(fw);
}
// Write type definitions, if any:
if (types.size() > 0) {
for (String s : types.keySet()) {
BibtexEntryType type = types.get(s);
if (type instanceof CustomEntryType) {
CustomEntryType tp = (CustomEntryType) type;
tp.save(fw);
fw.write(Globals.NEWLINE);
}
}
}
fw.close();
} catch (Throwable ex) {
ex.printStackTrace();
session.cancel();
// repairAfterError(file, backup, INIT_OK);
throw new SaveException(ex.getMessage(), exceptionCause);
}
return session;
}
private static class SaveSettings {
public final String pri, sec, ter;
public final boolean priD, secD, terD;
public SaveSettings(boolean isSaveOperation) {
/* three options:
* 1. original order (saveInOriginalOrder) -- not hit here as SaveSettings is not called in that case
* 2. current table sort order
* 3. ordered by specified order
*/
// This case should never be hit as SaveSettings() is never called if InOriginalOrder is true
assert !isSaveOperation || !Globals.prefs.getBoolean(JabRefPreferences.SAVE_IN_ORIGINAL_ORDER);
assert isSaveOperation || !Globals.prefs.getBoolean(JabRefPreferences.EXPORT_IN_ORIGINAL_ORDER);
if (isSaveOperation && Globals.prefs.getBoolean(JabRefPreferences.SAVE_IN_SPECIFIED_ORDER)) {
pri = Globals.prefs.get(JabRefPreferences.SAVE_PRIMARY_SORT_FIELD);
sec = Globals.prefs.get(JabRefPreferences.SAVE_SECONDARY_SORT_FIELD);
ter = Globals.prefs.get(JabRefPreferences.SAVE_TERTIARY_SORT_FIELD);
priD = Globals.prefs.getBoolean(JabRefPreferences.SAVE_PRIMARY_SORT_DESCENDING);
secD = Globals.prefs.getBoolean(JabRefPreferences.SAVE_SECONDARY_SORT_DESCENDING);
terD = Globals.prefs.getBoolean(JabRefPreferences.SAVE_TERTIARY_SORT_DESCENDING);
} else if (!isSaveOperation && Globals.prefs.getBoolean(JabRefPreferences.EXPORT_IN_SPECIFIED_ORDER)) {
pri = Globals.prefs.get(JabRefPreferences.EXPORT_PRIMARY_SORT_FIELD);
sec = Globals.prefs.get(JabRefPreferences.EXPORT_SECONDARY_SORT_FIELD);
ter = Globals.prefs.get(JabRefPreferences.EXPORT_TERTIARY_SORT_FIELD);
priD = Globals.prefs.getBoolean(JabRefPreferences.EXPORT_PRIMARY_SORT_DESCENDING);
secD = Globals.prefs.getBoolean(JabRefPreferences.EXPORT_SECONDARY_SORT_DESCENDING);
terD = Globals.prefs.getBoolean(JabRefPreferences.EXPORT_TERTIARY_SORT_DESCENDING);
} else {
// The setting is to save according to the current table order.
pri = Globals.prefs.get(JabRefPreferences.PRIMARY_SORT_FIELD);
sec = Globals.prefs.get(JabRefPreferences.SECONDARY_SORT_FIELD);
ter = Globals.prefs.get(JabRefPreferences.TERTIARY_SORT_FIELD);
priD = Globals.prefs.getBoolean(JabRefPreferences.PRIMARY_SORT_DESCENDING);
secD = Globals.prefs.getBoolean(JabRefPreferences.SECONDARY_SORT_DESCENDING);
terD = Globals.prefs.getBoolean(JabRefPreferences.TERTIARY_SORT_DESCENDING);
}
}
}
private static List<Comparator<BibtexEntry>> getSaveComparators(boolean isSaveOperation) {
SaveSettings saveSettings = new SaveSettings(isSaveOperation);
List<Comparator<BibtexEntry>> comparators = new ArrayList<Comparator<BibtexEntry>>();
if (isSaveOperation) {
comparators.add(new CrossRefEntryComparator());
}
comparators.add(new FieldComparator(saveSettings.pri, saveSettings.priD));
comparators.add(new FieldComparator(saveSettings.sec, saveSettings.secD));
comparators.add(new FieldComparator(saveSettings.ter, saveSettings.terD));
comparators.add(new FieldComparator(BibtexFields.KEY_FIELD));
return comparators;
}
/**
* Saves the database to file, including only the entries included in the
* supplied input array bes.
*
* @return A List containing warnings, if any.
*/
@SuppressWarnings("unchecked")
public static SaveSession savePartOfDatabase(BibtexDatabase database, MetaData metaData,
File file, JabRefPreferences prefs, BibtexEntry[] bes, String encoding) throws SaveException {
TreeMap<String, BibtexEntryType> types = new TreeMap<String, BibtexEntryType>(); // Map
// to
// collect
// entry
// type
// definitions
// that we must save along with entries using them.
BibtexEntry be = null;
boolean backup = prefs.getBoolean("backup");
SaveSession session;
try {
session = new SaveSession(file, encoding, backup);
} catch (IOException e) {
throw new SaveException(e.getMessage());
}
try {
// Define our data stream.
VerifyingWriter fw = session.getWriter();
// Write signature.
writeBibFileHeader(fw, encoding);
// Write preamble if there is one.
writePreamble(fw, database.getPreamble());
// Write strings if there are any.
writeStrings(fw, database);
// Write database entries. Take care, using CrossRefEntry-
// Comparator, that referred entries occur after referring
// ones. Apart from crossref requirements, entries will be
// sorted as they appear on the screen.
List<Comparator<BibtexEntry>> comparators = getSaveComparators(true);
// Use glazed lists to get a sorted view of the entries:
BasicEventList<BibtexEntry> entryList = new BasicEventList<BibtexEntry>();
SortedList<BibtexEntry> sorter = new SortedList<BibtexEntry>(entryList, new FieldComparatorStack<BibtexEntry>(comparators));
if ((bes != null) && (bes.length > 0)) {
Collections.addAll(sorter, bes);
}
FieldFormatter ff = new LatexFieldFormatter();
for (BibtexEntry aSorter : sorter) {
be = (aSorter);
// Check if we must write the type definition for this
// entry, as well. Our criterion is that all non-standard
// types (*not* customized standard types) must be written.
BibtexEntryType tp = be.getType();
if (BibtexEntryType.getStandardType(tp.getName()) == null) {
types.put(tp.getName(), tp);
}
be.write(fw, ff, true);
fw.write(Globals.NEWLINE);
}
// Write meta data.
if (metaData != null) {
metaData.writeMetaData(fw);
}
// Write type definitions, if any:
if (types.size() > 0) {
for (String s : types.keySet()) {
CustomEntryType tp = (CustomEntryType) types.get(s);
tp.save(fw);
fw.write(Globals.NEWLINE);
}
}
fw.close();
} catch (Throwable ex) {
session.cancel();
//repairAfterError(file, backup, status);
throw new SaveException(ex.getMessage(), be);
}
return session;
}
/**
* This method attempts to get a Reader for the file path given, either by
* loading it as a resource (from within jar), or as a normal file. If
* unsuccessful (e.g. file not found), an IOException is thrown.
*/
public static Reader getReader(String name) throws IOException {
Reader reader = null;
// Try loading as a resource first. This works for files inside the jar:
URL reso = Globals.class.getResource(name);
// If that didn't work, try loading as a normal file URL:
if (reso != null) {
try {
reader = new InputStreamReader(reso.openStream());
} catch (FileNotFoundException ex) {
throw new IOException(Globals.lang("Could not find layout file") + ": '" + name + "'.");
}
} else {
File f = new File(name);
try {
reader = new FileReader(f);
} catch (FileNotFoundException ex) {
throw new IOException(Globals.lang("Could not find layout file") + ": '" + name + "'.");
}
}
return reader;
}
/*
* We have begun to use getSortedEntries() for both database save operations
* and non-database save operations. In a non-database save operation
* (such as the exportDatabase call), we do not wish to use the
* global preference of saving in standard order.
*/
@SuppressWarnings("unchecked")
public static List<BibtexEntry> getSortedEntries(BibtexDatabase database, Set<String> keySet, boolean isSaveOperation) {
boolean inOriginalOrder = isSaveOperation ? Globals.prefs.getBoolean("saveInOriginalOrder")
: Globals.prefs.getBoolean("exportInOriginalOrder");
List<Comparator<BibtexEntry>> comparators;
if (inOriginalOrder) {
// Sort entries based on their creation order, utilizing the fact
// that IDs used for entries are increasing, sortable numbers.
comparators = new ArrayList<Comparator<BibtexEntry>>();
comparators.add(new CrossRefEntryComparator());
comparators.add(new IdComparator());
} else {
comparators = getSaveComparators(isSaveOperation);
}
// Use glazed lists to get a sorted view of the entries:
FieldComparatorStack<BibtexEntry> comparatorStack = new FieldComparatorStack<BibtexEntry>(comparators);
BasicEventList<BibtexEntry> entryList = new BasicEventList<BibtexEntry>();
SortedList<BibtexEntry> sorter = new SortedList<BibtexEntry>(entryList, comparatorStack);
if (keySet == null) {
keySet = database.getKeySet();
}
if (keySet != null) {
			for (String id : keySet) {
				sorter.add(database.getEntryById(id));
			}
}
return sorter;
}
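	// Example calls (editor's addition): a database save honours the
	// "saveInOriginalOrder" preference, an export honours "exportInOriginalOrder",
	// and a null keySet means "all entries in the database":
	//
	//   List<BibtexEntry> forSave   = getSortedEntries(db, null, true);
	//   List<BibtexEntry> forExport = getSortedEntries(db, keys, false);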
/**
* @return true iff the entry has a nonzero value in its field.
*/
private static boolean nonZeroField(BibtexEntry be, String field) {
		String o = be.getField(field);
		return (o != null) && !o.equals("0");
}
}
///////////////////////////////////////////////////////////////////////////////
// END OF FILE.
///////////////////////////////////////////////////////////////////////////////
| Removes unnecessary suppress warning "unchecked"
| src/main/java/net/sf/jabref/export/FileActions.java | Removes unnecessary suppress warning "unchecked" | <ide><path>rc/main/java/net/sf/jabref/export/FileActions.java
<ide> *
<ide>	 * @return The SaveSession for this operation, from which warnings, if any, can be retrieved.
<ide> */
<del> @SuppressWarnings("unchecked")
<ide> public static SaveSession savePartOfDatabase(BibtexDatabase database, MetaData metaData,
<ide> File file, JabRefPreferences prefs, BibtexEntry[] bes, String encoding) throws SaveException {
<ide> |
|
Java | bsd-2-clause | d0a0c6928ad9e9e21ad3dd6b218aca0e4ae61d9f | 0 | KronosDesign/runelite,devinfrench/runelite,l2-/runelite,abelbriggs1/runelite,Noremac201/runelite,devinfrench/runelite,Sethtroll/runelite,l2-/runelite,runelite/runelite,runelite/runelite,Noremac201/runelite,abelbriggs1/runelite,runelite/runelite,KronosDesign/runelite,abelbriggs1/runelite,Sethtroll/runelite | /*
* Copyright (c) 2017, Aria <[email protected]>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.client.plugins.mousehighlight;
import java.awt.Dimension;
import java.awt.Graphics2D;
import java.awt.Point;
import javax.annotation.Nullable;
import javax.inject.Inject;
import net.runelite.api.Client;
import net.runelite.api.MenuEntry;
import net.runelite.client.ui.overlay.Overlay;
import net.runelite.client.ui.overlay.OverlayPosition;
import net.runelite.client.ui.overlay.tooltip.Tooltip;
import net.runelite.client.ui.overlay.tooltip.TooltipManager;
class MouseHighlightOverlay extends Overlay
{
private final MouseHighlightConfig config;
private final TooltipManager tooltipManager;
private final Client client;
@Inject
MouseHighlightOverlay(@Nullable Client client, MouseHighlightConfig config, TooltipManager tooltipManager)
{
setPosition(OverlayPosition.DYNAMIC);
this.client = client;
this.config = config;
this.tooltipManager = tooltipManager;
}
@Override
public Dimension render(Graphics2D graphics, Point point)
{
if (!config.enabled())
{
return null;
}
if (client.isMenuOpen())
{
return null;
}
MenuEntry[] menuEntries = client.getMenuEntries();
int last = menuEntries.length - 1;
if (last < 0)
{
return null;
}
MenuEntry menuEntry = menuEntries[last];
String target = menuEntry.getTarget();
String option = menuEntry.getOption();
if (target.isEmpty())
{
return null;
}
// Trivial options that don't need to be highlighted, add more as they appear.
switch (option)
{
case "Walk here":
case "Cancel":
case "Continue":
return null;
case "Move":
// Hide overlay on sliding puzzle boxes
if (target.contains("Sliding piece"))
{
return null;
}
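				// no return here: non-puzzle "Move" entries drop out of the switch and still get a tooltip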
}
tooltipManager.add(new Tooltip(option + " " + target));
return null;
}
}
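// Editor's note: render() always returns null because this overlay never draws
// on the supplied Graphics2D; it only queues a Tooltip with the TooltipManager.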
| runelite-client/src/main/java/net/runelite/client/plugins/mousehighlight/MouseHighlightOverlay.java | /*
* Copyright (c) 2017, Aria <[email protected]>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.client.plugins.mousehighlight;
import java.awt.Dimension;
import java.awt.Graphics2D;
import java.awt.Point;
import javax.annotation.Nullable;
import javax.inject.Inject;
import net.runelite.api.Client;
import net.runelite.api.MenuEntry;
import net.runelite.client.ui.overlay.Overlay;
import net.runelite.client.ui.overlay.OverlayPosition;
import net.runelite.client.ui.overlay.tooltip.Tooltip;
import net.runelite.client.ui.overlay.tooltip.TooltipManager;
class MouseHighlightOverlay extends Overlay
{
private final MouseHighlightConfig config;
private final TooltipManager tooltipManager;
private final Client client;
@Inject
MouseHighlightOverlay(@Nullable Client client, MouseHighlightConfig config, TooltipManager tooltipManager)
{
setPosition(OverlayPosition.DYNAMIC);
this.client = client;
this.config = config;
this.tooltipManager = tooltipManager;
}
@Override
public Dimension render(Graphics2D graphics, Point point)
{
if (!config.enabled())
{
return null;
}
if (client.isMenuOpen())
{
return null;
}
MenuEntry[] menuEntries = client.getMenuEntries();
int last = menuEntries.length - 1;
if (last < 0)
{
return null;
}
MenuEntry menuEntry = menuEntries[last];
String target = menuEntry.getTarget();
String option = menuEntry.getOption();
if (target.isEmpty())
{
return null;
}
// Trivial options that don't need to be highlighted, add more as they appear.
switch (option)
{
case "Walk here":
case "Cancel":
case "Continue":
return null;
}
tooltipManager.add(new Tooltip(option + " " + target));
return null;
}
}
| mousehighlight: don't show tooltip on sliding puzzles
| runelite-client/src/main/java/net/runelite/client/plugins/mousehighlight/MouseHighlightOverlay.java | mousehighlight: don't show tooltip on sliding puzzles | <ide><path>unelite-client/src/main/java/net/runelite/client/plugins/mousehighlight/MouseHighlightOverlay.java
<ide> case "Cancel":
<ide> case "Continue":
<ide> return null;
<add> case "Move":
<add> // Hide overlay on sliding puzzle boxes
<add> if (target.contains("Sliding piece"))
<add> {
<add> return null;
<add> }
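<add>				// no return here: non-puzzle "Move" entries drop out of the switch and still get a tooltip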
<ide> }
<ide>
<ide> tooltipManager.add(new Tooltip(option + " " + target)); |
|
Java | agpl-3.0 | ae3d00b5795d58dad8d4edc5c72e38587844b312 | 0 | VietOpenCPS/opencps-v2,VietOpenCPS/opencps-v2 | package org.opencps.api.controller.impl;
import java.util.List;
import java.util.Locale;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import org.apache.commons.httpclient.util.HttpURLConnection;
import org.opencps.api.controller.RegistrationTemplatesManagement;
import org.opencps.api.controller.exception.ErrorMsg;
import org.opencps.api.controller.util.RegistrationTemplatesUtils;
import org.opencps.api.registrationtemplate.model.RegistrationTemplateDetailModel;
import org.opencps.api.registrationtemplate.model.RegistrationTemplateFormReportInputUpdateModel;
import org.opencps.api.registrationtemplate.model.RegistrationTemplateFormScriptInputUpdateModel;
import org.opencps.api.registrationtemplate.model.RegistrationTemplateInputModel;
import org.opencps.api.registrationtemplate.model.RegistrationTemplateSampleDataInputUpdateModel;
import org.opencps.api.registrationtemplate.model.RegistrationTemplatesResultsModel;
import org.opencps.auth.api.BackendAuth;
import org.opencps.auth.api.BackendAuthImpl;
import org.opencps.auth.api.exception.UnauthenticationException;
import org.opencps.auth.api.exception.UnauthorizationException;
import org.opencps.dossiermgt.action.RegistrationTemplatesActions;
import org.opencps.dossiermgt.action.impl.RegistrationTemplatesActionsImpl;
import org.opencps.dossiermgt.model.RegistrationTemplates;
import org.opencps.dossiermgt.service.RegistrationTemplatesLocalServiceUtil;
import com.liferay.portal.kernel.json.JSONFactoryUtil;
import com.liferay.portal.kernel.json.JSONObject;
import com.liferay.portal.kernel.model.Company;
import com.liferay.portal.kernel.model.User;
import com.liferay.portal.kernel.service.ServiceContext;
import com.liferay.portal.kernel.util.GetterUtil;
import com.liferay.portal.kernel.util.Validator;
public class RegistrationTemplatesManagementImpl implements RegistrationTemplatesManagement {
@Override
public Response getRegistrationTemplates(HttpServletRequest request, HttpHeaders header, Company company,
Locale locale, User user, ServiceContext serviceContext, String formNo, String govAgencyCode) {
// TODO Get All RegistrationTemplates
BackendAuth auth = new BackendAuthImpl();
int start = 0, end = 0;
long groupId = GetterUtil.getLong(header.getHeaderString("groupId"));
try {
if (!auth.isAuth(serviceContext)) {
throw new UnauthenticationException();
}
RegistrationTemplatesResultsModel results = new RegistrationTemplatesResultsModel();
RegistrationTemplatesActions action = new RegistrationTemplatesActionsImpl();
JSONObject registrationTemplateJsonObject = JSONFactoryUtil.createJSONObject();
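			// No filters supplied -> list the site's templates; otherwise look
			// them up by form number / agency code.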
			if (Validator.isNull(formNo) && Validator.isNull(govAgencyCode)) {
				registrationTemplateJsonObject = action.getRegistrationTemplates(groupId, start, end);
			} else {
				registrationTemplateJsonObject = action.getRegistrationTemplates(formNo, govAgencyCode);
			}
List<RegistrationTemplates> lstRegistrationTemplate = (List<RegistrationTemplates>) registrationTemplateJsonObject
.get("lstRegistrationTemplate");
results.setTotal(registrationTemplateJsonObject.getInt("total"));
results.getData().addAll(
RegistrationTemplatesUtils.mappingToRegistrationTemplatesResultsModel(lstRegistrationTemplate));
return Response.status(200).entity(results).build();
} catch (Exception e) {
return processException(e);
}
}
@Override
public Response addRegistrationTemplate(HttpServletRequest request, HttpHeaders header, Company company,
Locale locale, User user, ServiceContext serviceContext, RegistrationTemplateInputModel input) {
// TODO Add RegistrationTemplates
BackendAuth auth = new BackendAuthImpl();
long groupId = GetterUtil.getLong(header.getHeaderString("groupId"));
try {
if (!auth.isAuth(serviceContext)) {
throw new UnauthenticationException();
}
RegistrationTemplatesActions action = new RegistrationTemplatesActionsImpl();
RegistrationTemplates registrationTemplate = action.addRegistrationTemplate(groupId,
input.getGovAgencyCode(), input.getGovAgencyName(), input.getFormNo(), input.getFormName(),
input.isMultiple(), input.getFormScript(), input.getFormReport(), input.getSampleData(),
serviceContext);
RegistrationTemplateDetailModel result = RegistrationTemplatesUtils
.mappingToRegistrationTemplateModel(registrationTemplate);
return Response.status(200).entity(result).build();
} catch (Exception e) {
return processException(e);
}
}
@Override
public Response updateRegistrationTemplate(HttpServletRequest request, HttpHeaders header, Company company,
Locale locale, User user, ServiceContext serviceContext, RegistrationTemplateInputModel input,
long registrationTemplateId) {
// TODO Update RegistrationTemplates
BackendAuth auth = new BackendAuthImpl();
long groupId = GetterUtil.getLong(header.getHeaderString("groupId"));
try {
if (!auth.isAuth(serviceContext)) {
throw new UnauthenticationException();
}
RegistrationTemplatesActions action = new RegistrationTemplatesActionsImpl();
RegistrationTemplates registrationTemplate = action.updateRegistrationTemplates(groupId,
registrationTemplateId, input.getGovAgencyCode(), input.getGovAgencyName(), input.getFormNo(),
input.getFormName(), input.isMultiple(), input.getFormScript(), input.getFormReport(),
input.getSampleData(), serviceContext);
RegistrationTemplateDetailModel result = RegistrationTemplatesUtils
.mappingToRegistrationTemplateModel(registrationTemplate);
return Response.status(200).entity(result).build();
} catch (Exception e) {
return processException(e);
}
}
@Override
public Response removeRegistrationTemplate(HttpServletRequest request, HttpHeaders header, Company company,
Locale locale, User user, ServiceContext serviceContext, long registrationTemplateId) {
// TODO Remove RegistrationTemplates
BackendAuth auth = new BackendAuthImpl();
long groupId = GetterUtil.getLong(header.getHeaderString("groupId"));
try {
if (!auth.isAuth(serviceContext)) {
throw new UnauthenticationException();
}
RegistrationTemplatesActions action = new RegistrationTemplatesActionsImpl();
action.removeRegistrationTemplate(groupId, registrationTemplateId);
return Response.status(200).entity("Success").build();
} catch (Exception e) {
return processException(e);
}
}
@Override
public Response getFormScriptByRegistrationTemplateId(HttpServletRequest request, HttpHeaders header,
Company company, Locale locale, User user, ServiceContext serviceContext, long registrationTemplateId) {
// TODO Get FormScript of RegistrationTemplates
BackendAuth auth = new BackendAuthImpl();
RegistrationTemplateFormScriptInputUpdateModel result = new RegistrationTemplateFormScriptInputUpdateModel();
try {
if (!auth.isAuth(serviceContext)) {
throw new UnauthenticationException();
}
RegistrationTemplates registrationTemplate = RegistrationTemplatesLocalServiceUtil
.getRegistrationTemplates(registrationTemplateId);
result.setFormScript(registrationTemplate.getFormScript());
return Response.status(200).entity(result).build();
} catch (Exception e) {
return processException(e);
}
}
public Response updateRegistrationTemplateFormScript(HttpServletRequest request, HttpHeaders header,
Company company, Locale locale, User user, ServiceContext serviceContext, long registrationTemplateId,
String formScript) {
// TODO Update FormScript of RegistrationTemplates
BackendAuth auth = new BackendAuthImpl();
long groupId = GetterUtil.getLong(header.getHeaderString("groupId"));
try {
if (!auth.isAuth(serviceContext)) {
throw new UnauthenticationException();
}
RegistrationTemplatesActions action = new RegistrationTemplatesActionsImpl();
RegistrationTemplates registrationTemplate = action.updateFormScript(groupId, registrationTemplateId,
formScript, serviceContext);
RegistrationTemplateDetailModel result = RegistrationTemplatesUtils
.mappingToRegistrationTemplateModel(registrationTemplate);
return Response.status(200).entity(result).build();
} catch (Exception e) {
return processException(e);
}
}
@Override
public Response getFormReportByRegistrationTemplateId(HttpServletRequest request, HttpHeaders header,
Company company, Locale locale, User user, ServiceContext serviceContext, long registrationTemplateId) {
// TODO Get FormReport of RegistrationTemplates
BackendAuth auth = new BackendAuthImpl();
RegistrationTemplateFormReportInputUpdateModel result = new RegistrationTemplateFormReportInputUpdateModel();
try {
if (!auth.isAuth(serviceContext)) {
throw new UnauthenticationException();
}
RegistrationTemplates registrationTemplate = RegistrationTemplatesLocalServiceUtil
.getRegistrationTemplates(registrationTemplateId);
result.setFormReport(registrationTemplate.getFormReport());
return Response.status(200).entity(result).build();
} catch (Exception e) {
return processException(e);
}
}
public Response updateRegistrationTemplateFormReport(HttpServletRequest request, HttpHeaders header,
Company company, Locale locale, User user, ServiceContext serviceContext, long registrationTemplateId,
String formReport) {
// TODO Update FormReport of RegistrationTemplates
BackendAuth auth = new BackendAuthImpl();
long groupId = GetterUtil.getLong(header.getHeaderString("groupId"));
try {
if (!auth.isAuth(serviceContext)) {
throw new UnauthenticationException();
}
RegistrationTemplatesActions action = new RegistrationTemplatesActionsImpl();
RegistrationTemplates registrationTemplate = action.updateFormReport(groupId, registrationTemplateId,
formReport, serviceContext);
RegistrationTemplateDetailModel result = RegistrationTemplatesUtils
.mappingToRegistrationTemplateModel(registrationTemplate);
return Response.status(200).entity(result).build();
} catch (Exception e) {
return processException(e);
}
}
@Override
public Response getSampleDataByRegistrationTemplateId(HttpServletRequest request, HttpHeaders header,
Company company, Locale locale, User user, ServiceContext serviceContext, long registrationTemplateId) {
// TODO Get SampleData of RegistrationTemplates
BackendAuth auth = new BackendAuthImpl();
RegistrationTemplateSampleDataInputUpdateModel result = new RegistrationTemplateSampleDataInputUpdateModel();
try {
if (!auth.isAuth(serviceContext)) {
throw new UnauthenticationException();
}
RegistrationTemplates registrationTemplate = RegistrationTemplatesLocalServiceUtil
.getRegistrationTemplates(registrationTemplateId);
result.setSampleData(registrationTemplate.getSampleData());
return Response.status(200).entity(result).build();
} catch (Exception e) {
return processException(e);
}
}
public Response updateRegistrationTemplateSampleData(HttpServletRequest request, HttpHeaders header,
Company company, Locale locale, User user, ServiceContext serviceContext, long registrationTemplatesId,
String sampleData) {
		// TODO Update SampleData of RegistrationTemplates
BackendAuth auth = new BackendAuthImpl();
long groupId = GetterUtil.getLong(header.getHeaderString("groupId"));
try {
if (!auth.isAuth(serviceContext)) {
throw new UnauthenticationException();
}
RegistrationTemplatesActions action = new RegistrationTemplatesActionsImpl();
RegistrationTemplates registrationTemplate = action.updateSampledata(groupId, registrationTemplatesId,
sampleData, serviceContext);
RegistrationTemplateDetailModel result = RegistrationTemplatesUtils
.mappingToRegistrationTemplateModel(registrationTemplate);
return Response.status(200).entity(result).build();
} catch (Exception e) {
return processException(e);
}
}
@Override
public Response getRegistrationTemplatebyId(HttpServletRequest request, HttpHeaders header, Company company,
Locale locale, User user, ServiceContext serviceContext, String id) {
// TODO Get RegistrationTemplates by Id
BackendAuth auth = new BackendAuthImpl();
long groupId = GetterUtil.getLong(header.getHeaderString("groupId"));
try {
if (!auth.isAuth(serviceContext)) {
throw new UnauthenticationException();
}
RegistrationTemplates registrationTemplates = RegistrationTemplatesLocalServiceUtil
.getRegistrationTemplatebyId(groupId, id);
RegistrationTemplateDetailModel result = RegistrationTemplatesUtils
.mappingToRegistrationTemplateModel(registrationTemplates);
return Response.status(200).entity(result).build();
} catch (Exception e) {
return processException(e);
}
}
private Response processException(Exception e) {
ErrorMsg error = new ErrorMsg();
if (e instanceof UnauthenticationException) {
error.setMessage("Non-Authoritative Information.");
error.setCode(HttpURLConnection.HTTP_NOT_AUTHORITATIVE);
error.setDescription("Non-Authoritative Information.");
return Response.status(HttpURLConnection.HTTP_NOT_AUTHORITATIVE).entity(error).build();
} else {
if (e instanceof UnauthorizationException) {
error.setMessage("Unauthorized.");
				error.setCode(HttpURLConnection.HTTP_UNAUTHORIZED);
error.setDescription("Unauthorized.");
return Response.status(HttpURLConnection.HTTP_UNAUTHORIZED).entity(error).build();
} else {
error.setMessage("No Content.");
error.setCode(HttpURLConnection.HTTP_FORBIDDEN);
error.setDescription("No Content.");
return Response.status(HttpURLConnection.HTTP_FORBIDDEN).entity(error).build();
}
}
}
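	// Editor's note on the mapping above (illustrative):
	//   UnauthenticationException -> HTTP 203 with a 203 body code
	//   UnauthorizationException  -> HTTP 401 with a 401 body code
	//   anything else             -> HTTP 403 with a 403 body code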
}
| modules/backend-api-rest/src/main/java/org/opencps/api/controller/impl/RegistrationTemplatesManagementImpl.java | package org.opencps.api.controller.impl;
import java.util.List;
import java.util.Locale;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import org.apache.commons.httpclient.util.HttpURLConnection;
import org.opencps.api.controller.RegistrationTemplatesManagement;
import org.opencps.api.controller.exception.ErrorMsg;
import org.opencps.api.controller.util.RegistrationTemplatesUtils;
import org.opencps.api.registrationtemplate.model.RegistrationTemplateDetailModel;
import org.opencps.api.registrationtemplate.model.RegistrationTemplateFormReportInputUpdateModel;
import org.opencps.api.registrationtemplate.model.RegistrationTemplateFormScriptInputUpdateModel;
import org.opencps.api.registrationtemplate.model.RegistrationTemplateInputModel;
import org.opencps.api.registrationtemplate.model.RegistrationTemplateSampleDataInputUpdateModel;
import org.opencps.api.registrationtemplate.model.RegistrationTemplatesResultsModel;
import org.opencps.auth.api.BackendAuth;
import org.opencps.auth.api.BackendAuthImpl;
import org.opencps.auth.api.exception.UnauthenticationException;
import org.opencps.auth.api.exception.UnauthorizationException;
import org.opencps.dossiermgt.action.RegistrationTemplatesActions;
import org.opencps.dossiermgt.action.impl.RegistrationTemplatesActionsImpl;
import org.opencps.dossiermgt.model.RegistrationTemplates;
import org.opencps.dossiermgt.service.RegistrationTemplatesLocalServiceUtil;
import com.liferay.portal.kernel.json.JSONObject;
import com.liferay.portal.kernel.model.Company;
import com.liferay.portal.kernel.model.User;
import com.liferay.portal.kernel.service.ServiceContext;
import com.liferay.portal.kernel.util.GetterUtil;
public class RegistrationTemplatesManagementImpl implements RegistrationTemplatesManagement {
@Override
public Response getRegistrationTemplates(HttpServletRequest request, HttpHeaders header, Company company,
Locale locale, User user, ServiceContext serviceContext, String formNo, String govAgencyCode) {
// TODO Get All RegistrationTemplates
BackendAuth auth = new BackendAuthImpl();
int start = 0, end = 0;
long groupId = GetterUtil.getLong(header.getHeaderString("groupId"));
try {
if (!auth.isAuth(serviceContext)) {
throw new UnauthenticationException();
}
RegistrationTemplatesResultsModel results = new RegistrationTemplatesResultsModel();
RegistrationTemplatesActions action = new RegistrationTemplatesActionsImpl();
JSONObject registrationTemplateJsonObject = action.getRegistrationTemplates(formNo, govAgencyCode);
List<RegistrationTemplates> lstRegistrationTemplate = (List<RegistrationTemplates>) registrationTemplateJsonObject
.get("lstRegistrationTemplate");
results.setTotal(registrationTemplateJsonObject.getInt("total"));
results.getData().addAll(
RegistrationTemplatesUtils.mappingToRegistrationTemplatesResultsModel(lstRegistrationTemplate));
return Response.status(200).entity(results).build();
} catch (Exception e) {
return processException(e);
}
}
@Override
public Response addRegistrationTemplate(HttpServletRequest request, HttpHeaders header, Company company,
Locale locale, User user, ServiceContext serviceContext, RegistrationTemplateInputModel input) {
// TODO Add RegistrationTemplates
BackendAuth auth = new BackendAuthImpl();
long groupId = GetterUtil.getLong(header.getHeaderString("groupId"));
try {
if (!auth.isAuth(serviceContext)) {
throw new UnauthenticationException();
}
RegistrationTemplatesActions action = new RegistrationTemplatesActionsImpl();
RegistrationTemplates registrationTemplate = action.addRegistrationTemplate(groupId,
input.getGovAgencyCode(), input.getGovAgencyName(), input.getFormNo(), input.getFormName(),
input.isMultiple(), input.getFormScript(), input.getFormReport(), input.getSampleData(),
serviceContext);
RegistrationTemplateDetailModel result = RegistrationTemplatesUtils
.mappingToRegistrationTemplateModel(registrationTemplate);
return Response.status(200).entity(result).build();
} catch (Exception e) {
return processException(e);
}
}
@Override
public Response updateRegistrationTemplate(HttpServletRequest request, HttpHeaders header, Company company,
Locale locale, User user, ServiceContext serviceContext, RegistrationTemplateInputModel input,
long registrationTemplateId) {
// TODO Update RegistrationTemplates
BackendAuth auth = new BackendAuthImpl();
long groupId = GetterUtil.getLong(header.getHeaderString("groupId"));
try {
if (!auth.isAuth(serviceContext)) {
throw new UnauthenticationException();
}
RegistrationTemplatesActions action = new RegistrationTemplatesActionsImpl();
RegistrationTemplates registrationTemplate = action.updateRegistrationTemplates(groupId,
registrationTemplateId, input.getGovAgencyCode(), input.getGovAgencyName(), input.getFormNo(),
input.getFormName(), input.isMultiple(), input.getFormScript(), input.getFormReport(),
input.getSampleData(), serviceContext);
RegistrationTemplateDetailModel result = RegistrationTemplatesUtils
.mappingToRegistrationTemplateModel(registrationTemplate);
return Response.status(200).entity(result).build();
} catch (Exception e) {
return processException(e);
}
}
@Override
public Response removeRegistrationTemplate(HttpServletRequest request, HttpHeaders header, Company company,
Locale locale, User user, ServiceContext serviceContext, long registrationTemplateId) {
// TODO Remove RegistrationTemplates
BackendAuth auth = new BackendAuthImpl();
long groupId = GetterUtil.getLong(header.getHeaderString("groupId"));
try {
if (!auth.isAuth(serviceContext)) {
throw new UnauthenticationException();
}
RegistrationTemplatesActions action = new RegistrationTemplatesActionsImpl();
action.removeRegistrationTemplate(groupId, registrationTemplateId);
return Response.status(200).entity("Success").build();
} catch (Exception e) {
return processException(e);
}
}
@Override
public Response getFormScriptByRegistrationTemplateId(HttpServletRequest request, HttpHeaders header,
Company company, Locale locale, User user, ServiceContext serviceContext, long registrationTemplateId) {
// TODO Get FormScript of RegistrationTemplates
BackendAuth auth = new BackendAuthImpl();
RegistrationTemplateFormScriptInputUpdateModel result = new RegistrationTemplateFormScriptInputUpdateModel();
try {
if (!auth.isAuth(serviceContext)) {
throw new UnauthenticationException();
}
RegistrationTemplates registrationTemplate = RegistrationTemplatesLocalServiceUtil
.getRegistrationTemplates(registrationTemplateId);
result.setFormScript(registrationTemplate.getFormScript());
return Response.status(200).entity(result).build();
} catch (Exception e) {
return processException(e);
}
}
public Response updateRegistrationTemplateFormScript(HttpServletRequest request, HttpHeaders header,
Company company, Locale locale, User user, ServiceContext serviceContext, long registrationTemplateId,
String formScript) {
// TODO Update FormScript of RegistrationTemplates
BackendAuth auth = new BackendAuthImpl();
long groupId = GetterUtil.getLong(header.getHeaderString("groupId"));
try {
if (!auth.isAuth(serviceContext)) {
throw new UnauthenticationException();
}
RegistrationTemplatesActions action = new RegistrationTemplatesActionsImpl();
RegistrationTemplates registrationTemplate = action.updateFormScript(groupId, registrationTemplateId,
formScript, serviceContext);
RegistrationTemplateDetailModel result = RegistrationTemplatesUtils
.mappingToRegistrationTemplateModel(registrationTemplate);
return Response.status(200).entity(result).build();
} catch (Exception e) {
return processException(e);
}
}
@Override
public Response getFormReportByRegistrationTemplateId(HttpServletRequest request, HttpHeaders header,
Company company, Locale locale, User user, ServiceContext serviceContext, long registrationTemplateId) {
// TODO Get FormReport of RegistrationTemplates
BackendAuth auth = new BackendAuthImpl();
RegistrationTemplateFormReportInputUpdateModel result = new RegistrationTemplateFormReportInputUpdateModel();
try {
if (!auth.isAuth(serviceContext)) {
throw new UnauthenticationException();
}
RegistrationTemplates registrationTemplate = RegistrationTemplatesLocalServiceUtil
.getRegistrationTemplates(registrationTemplateId);
result.setFormReport(registrationTemplate.getFormReport());
return Response.status(200).entity(result).build();
} catch (Exception e) {
return processException(e);
}
}
public Response updateRegistrationTemplateFormReport(HttpServletRequest request, HttpHeaders header,
Company company, Locale locale, User user, ServiceContext serviceContext, long registrationTemplateId,
String formReport) {
// TODO Update FormReport of RegistrationTemplates
BackendAuth auth = new BackendAuthImpl();
long groupId = GetterUtil.getLong(header.getHeaderString("groupId"));
try {
if (!auth.isAuth(serviceContext)) {
throw new UnauthenticationException();
}
RegistrationTemplatesActions action = new RegistrationTemplatesActionsImpl();
RegistrationTemplates registrationTemplate = action.updateFormReport(groupId, registrationTemplateId,
formReport, serviceContext);
RegistrationTemplateDetailModel result = RegistrationTemplatesUtils
.mappingToRegistrationTemplateModel(registrationTemplate);
return Response.status(200).entity(result).build();
} catch (Exception e) {
return processException(e);
}
}
@Override
public Response getSampleDataByRegistrationTemplateId(HttpServletRequest request, HttpHeaders header,
Company company, Locale locale, User user, ServiceContext serviceContext, long registrationTemplateId) {
// TODO Get SampleData of RegistrationTemplates
BackendAuth auth = new BackendAuthImpl();
RegistrationTemplateSampleDataInputUpdateModel result = new RegistrationTemplateSampleDataInputUpdateModel();
try {
if (!auth.isAuth(serviceContext)) {
throw new UnauthenticationException();
}
RegistrationTemplates registrationTemplate = RegistrationTemplatesLocalServiceUtil
.getRegistrationTemplates(registrationTemplateId);
result.setSampleData(registrationTemplate.getSampleData());
return Response.status(200).entity(result).build();
} catch (Exception e) {
return processException(e);
}
}
public Response updateRegistrationTemplateSampleData(HttpServletRequest request, HttpHeaders header,
Company company, Locale locale, User user, ServiceContext serviceContext, long registrationTemplatesId,
String sampleData) {
		// TODO Update SampleData of RegistrationTemplates
BackendAuth auth = new BackendAuthImpl();
long groupId = GetterUtil.getLong(header.getHeaderString("groupId"));
try {
if (!auth.isAuth(serviceContext)) {
throw new UnauthenticationException();
}
RegistrationTemplatesActions action = new RegistrationTemplatesActionsImpl();
RegistrationTemplates registrationTemplate = action.updateSampledata(groupId, registrationTemplatesId,
sampleData, serviceContext);
RegistrationTemplateDetailModel result = RegistrationTemplatesUtils
.mappingToRegistrationTemplateModel(registrationTemplate);
return Response.status(200).entity(result).build();
} catch (Exception e) {
return processException(e);
}
}
@Override
public Response getRegistrationTemplatebyId(HttpServletRequest request, HttpHeaders header, Company company,
Locale locale, User user, ServiceContext serviceContext, String id) {
// TODO Get RegistrationTemplates by Id
BackendAuth auth = new BackendAuthImpl();
long groupId = GetterUtil.getLong(header.getHeaderString("groupId"));
try {
if (!auth.isAuth(serviceContext)) {
throw new UnauthenticationException();
}
RegistrationTemplates registrationTemplates = RegistrationTemplatesLocalServiceUtil
.getRegistrationTemplatebyId(groupId, id);
RegistrationTemplateDetailModel result = RegistrationTemplatesUtils
.mappingToRegistrationTemplateModel(registrationTemplates);
return Response.status(200).entity(result).build();
} catch (Exception e) {
return processException(e);
}
}
private Response processException(Exception e) {
ErrorMsg error = new ErrorMsg();
if (e instanceof UnauthenticationException) {
error.setMessage("Non-Authoritative Information.");
error.setCode(HttpURLConnection.HTTP_NOT_AUTHORITATIVE);
error.setDescription("Non-Authoritative Information.");
return Response.status(HttpURLConnection.HTTP_NOT_AUTHORITATIVE).entity(error).build();
} else {
if (e instanceof UnauthorizationException) {
error.setMessage("Unauthorized.");
				error.setCode(HttpURLConnection.HTTP_UNAUTHORIZED);
error.setDescription("Unauthorized.");
return Response.status(HttpURLConnection.HTTP_UNAUTHORIZED).entity(error).build();
} else {
error.setMessage("No Content.");
error.setCode(HttpURLConnection.HTTP_FORBIDDEN);
error.setDescription("No Content.");
return Response.status(HttpURLConnection.HTTP_FORBIDDEN).entity(error).build();
}
}
}
}
| fix get registrationTemplate | modules/backend-api-rest/src/main/java/org/opencps/api/controller/impl/RegistrationTemplatesManagementImpl.java | fix get registrationTemplate | <ide><path>odules/backend-api-rest/src/main/java/org/opencps/api/controller/impl/RegistrationTemplatesManagementImpl.java
<ide> import org.opencps.dossiermgt.model.RegistrationTemplates;
<ide> import org.opencps.dossiermgt.service.RegistrationTemplatesLocalServiceUtil;
<ide>
<add>import com.liferay.portal.kernel.json.JSONFactoryUtil;
<ide> import com.liferay.portal.kernel.json.JSONObject;
<ide> import com.liferay.portal.kernel.model.Company;
<ide> import com.liferay.portal.kernel.model.User;
<ide> import com.liferay.portal.kernel.service.ServiceContext;
<ide> import com.liferay.portal.kernel.util.GetterUtil;
<add>import com.liferay.portal.kernel.util.Validator;
<ide>
<ide> public class RegistrationTemplatesManagementImpl implements RegistrationTemplatesManagement {
<ide>
<ide> RegistrationTemplatesResultsModel results = new RegistrationTemplatesResultsModel();
<ide>
<ide> RegistrationTemplatesActions action = new RegistrationTemplatesActionsImpl();
<del>
<del> JSONObject registrationTemplateJsonObject = action.getRegistrationTemplates(formNo, govAgencyCode);
<add>
<add> JSONObject registrationTemplateJsonObject = JSONFactoryUtil.createJSONObject();
<add>
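<add>			// No filters supplied -> list the site's templates; otherwise look
<add>			// them up by form number / agency code.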
<add>			if (Validator.isNull(formNo) && Validator.isNull(govAgencyCode)) {
<add>				registrationTemplateJsonObject = action.getRegistrationTemplates(groupId, start, end);
<add>			} else {
<add>				registrationTemplateJsonObject = action.getRegistrationTemplates(formNo, govAgencyCode);
<add>			}
<ide>
<ide> List<RegistrationTemplates> lstRegistrationTemplate = (List<RegistrationTemplates>) registrationTemplateJsonObject
<ide> .get("lstRegistrationTemplate"); |
|
Java | mit | 577d14904ac06ebb6dbb2aee7a0015b672664e6c | 0 | Bathlamos/RTDC,Bathlamos/RTDC,Bathlamos/RTDC | package rtdc.web.server.service;
import rtdc.core.Config;
import rtdc.core.model.User;
import java.sql.*;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.logging.Level;
import java.util.logging.Logger;
public class AsteriskRealTimeService {
// Run Asterisk requests on separate threads to not cause server latency
private static ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(4);
private static Connection getConnection(){
try {
Class.forName("com.mysql.jdbc.Driver");
String hostname = Config.ASTERISK_IP;
String dbName = "asterisk_realtime";
String dbUserName = "user";
String dbPassword = "password";
DriverManager.setLoginTimeout(3); // The time (in seconds) it takes until we declare the connection request timed out
return DriverManager.getConnection("jdbc:mysql://"+hostname+"/"+dbName, dbUserName, dbPassword);
} catch (ClassNotFoundException e) {
Logger.getLogger(AsteriskRealTimeService.class.getName()).log(Level.SEVERE, "MySQL JDBC Driver not found!");
} catch (SQLException e) {
Logger.getLogger(AsteriskRealTimeService.class.getName()).log(Level.SEVERE, "Could not connect to the Asterisk database; " + e.getMessage());
}
return null;
}
public static void addUser(final User user, final String password){
executor.submit(new Runnable(){
@Override
public void run() {
Connection connection = getConnection();
if (connection == null) {
Logger.getLogger(AsteriskRealTimeService.class.getName()).log(
Level.SEVERE, user.getUsername() + " was NOT added to Asterisk; no connection was established");
return;
}
Logger.getLogger(AsteriskRealTimeService.class.getName()).log(Level.INFO, "Adding user " + user.getUsername() + " to Asterisk with extension " + user.getId());
String sipQuery = "INSERT INTO sip_buddies (" +
" NAME, defaultuser, callerid, secret, context, HOST, TYPE) VALUES (" +
" ?," + " ?," + " ?," + " ?," +
" 'users', 'dynamic', 'friend');"; // context, host, type
String extensionQuery = "INSERT INTO extensions (context, exten, priority, app, appdata) VALUES (" +
" 'users', ?, 1, 'Dial', ?);";
try {
PreparedStatement sipStatement = connection.prepareStatement(sipQuery);
sipStatement.setString(1, user.getUsername()); // name
sipStatement.setString(2, user.getUsername()); // defaultuser
sipStatement.setString(3, user.getFirstName() + " " + user.getLastName()); // callerid
sipStatement.setString(4, password); // secret
sipStatement.executeUpdate();
PreparedStatement extensionStatement = connection.prepareStatement(extensionQuery);
extensionStatement.setInt(1, user.getId()); // exten
extensionStatement.setString(2, "SIP/" + user.getUsername()); // appdata
extensionStatement.executeUpdate();
} catch (SQLException e) {
Logger.getLogger(AsteriskRealTimeService.class.getName()).log(Level.SEVERE, "SQL Query failed: " + sipQuery);
e.printStackTrace();
} finally {
try {
connection.close();
} catch (SQLException e) {
e.printStackTrace();
}
}
}
});
}
public static void deleteUser(final User user){
executor.submit(new Runnable() {
@Override
public void run() {
Connection connection = getConnection();
if (connection == null) {
Logger.getLogger(AsteriskRealTimeService.class.getName()).log(
Level.SEVERE, "User " + user.getUsername() + " could not be deleted from Asterisk; no connection was established");
return;
}
Logger.getLogger(AsteriskRealTimeService.class.getName()).log(Level.INFO, "Deleting user " + user.getUsername() + " from Asterisk");
String sipQuery = "DELETE FROM sip_buddies WHERE NAME=?;";
String extensionQuery = "DELETE FROM extensions WHERE exten=?;";
try {
PreparedStatement sipStatement = connection.prepareStatement(sipQuery);
sipStatement.setString(1, user.getUsername());
sipStatement.executeUpdate();
PreparedStatement extensionStatement = connection.prepareStatement(extensionQuery);
extensionStatement.setInt(1, user.getId());
extensionStatement.executeUpdate();
} catch (SQLException e) {
Logger.getLogger(AsteriskRealTimeService.class.getName()).log(Level.SEVERE, "SQL Query failed: " + sipQuery);
e.printStackTrace();
} finally {
try {
connection.close();
} catch (SQLException e) {
e.printStackTrace();
}
}
}
});
}
}
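// Editor's notes (illustrative, not part of the original commit):
// - The "user"/"password" literals in getConnection() are placeholders; a real
//   deployment would load them from configuration, as Config.ASTERISK_IP is.
// - Typical usage, with a hypothetical User and password:
//
//     User u = ...;                                  // u.getId() becomes the SIP extension
//     AsteriskRealTimeService.addUser(u, "s3cret");  // queues the INSERTs on the executor
//     AsteriskRealTimeService.deleteUser(u);         // queues the matching DELETEs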
| web/src/main/java/rtdc/web/server/service/AsteriskRealTimeService.java | package rtdc.web.server.service;
import rtdc.core.Config;
import rtdc.core.model.User;
import java.sql.*;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.logging.Level;
import java.util.logging.Logger;
public class AsteriskRealTimeService {
// Run Asterisk requests on separate threads to not cause server latency
private static ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(4);
private static Connection getConnection(){
try {
Class.forName("com.mysql.jdbc.Driver");
String hostname = Config.ASTERISK_IP;
String dbName = "asterisk_realtime";
String dbUserName = "user";
String dbPassword = "password";
DriverManager.setLoginTimeout(3); // The time (in seconds) it takes until we declare the connection request timed out
return DriverManager.getConnection("jdbc:mysql://"+hostname+"/"+dbName, dbUserName, dbPassword);
} catch (ClassNotFoundException e) {
Logger.getLogger(AsteriskRealTimeService.class.getName()).log(Level.SEVERE, "MySQL JDBC Driver not found!");
} catch (SQLException e) {
Logger.getLogger(AsteriskRealTimeService.class.getName()).log(Level.SEVERE, "Could not connect to the Asterisk database; " + e.getMessage());
}
return null;
}
public static void addUser(final User user, final String password){
executor.submit(new Runnable(){
@Override
public void run() {
Connection connection = getConnection();
if (connection == null) {
Logger.getLogger(AsteriskRealTimeService.class.getName()).log(
Level.SEVERE, user.getUsername() + " was NOT added to Asterisk; no connection was established");
return;
}
Logger.getLogger(AsteriskRealTimeService.class.getName()).log(Level.INFO, "Adding user " + user.getUsername() + " to Asterisk with extension " + user.getId());
String sipQuery = "INSERT INTO sip_buddies (" +
" NAME, defaultuser, callerid, secret, context, HOST, TYPE, allow) VALUES (" +
" ?," + " ?," + " ?," + " ?," +
" 'users', 'dynamic', 'friend'," + // context, host, type
" 'g729;ilbc;gsm;ulaw;alaw;vp8');"; // allow (should be updated with which codecs we want users to be able to use)
String extensionQuery = "INSERT INTO extensions (context, exten, priority, app, appdata) VALUES (" +
" 'users', ?, 1, 'Dial', ?);";
try {
PreparedStatement sipStatement = connection.prepareStatement(sipQuery);
sipStatement.setString(1, user.getUsername()); // name
sipStatement.setString(2, user.getUsername()); // defaultuser
sipStatement.setString(3, user.getFirstName() + " " + user.getLastName()); // callerid
sipStatement.setString(4, password); // secret
sipStatement.executeUpdate();
PreparedStatement extensionStatement = connection.prepareStatement(extensionQuery);
extensionStatement.setInt(1, user.getId()); // exten
extensionStatement.setString(2, "SIP/" + user.getUsername()); // appdata
extensionStatement.executeUpdate();
} catch (SQLException e) {
Logger.getLogger(AsteriskRealTimeService.class.getName()).log(Level.SEVERE, "SQL Query failed: " + sipQuery);
e.printStackTrace();
} finally {
try {
connection.close();
} catch (SQLException e) {
e.printStackTrace();
}
}
}
});
}
public static void deleteUser(final User user){
executor.submit(new Runnable() {
@Override
public void run() {
Connection connection = getConnection();
if (connection == null) {
Logger.getLogger(AsteriskRealTimeService.class.getName()).log(
Level.SEVERE, "User " + user.getUsername() + " could not be deleted from Asterisk; no connection was established");
return;
}
Logger.getLogger(AsteriskRealTimeService.class.getName()).log(Level.INFO, "Deleting user " + user.getUsername() + " from Asterisk");
String sipQuery = "DELETE FROM sip_buddies WHERE NAME=?;";
String extensionQuery = "DELETE FROM extensions WHERE exten=?;";
try {
PreparedStatement sipStatement = connection.prepareStatement(sipQuery);
sipStatement.setString(1, user.getUsername());
sipStatement.executeUpdate();
PreparedStatement extensionStatement = connection.prepareStatement(extensionQuery);
extensionStatement.setInt(1, user.getId());
extensionStatement.executeUpdate();
} catch (SQLException e) {
Logger.getLogger(AsteriskRealTimeService.class.getName()).log(Level.SEVERE, "SQL Query failed: " + sipQuery);
e.printStackTrace();
} finally {
try {
connection.close();
} catch (SQLException e) {
e.printStackTrace();
}
}
}
});
}
}
| Revised Asterisk's addUser query
| web/src/main/java/rtdc/web/server/service/AsteriskRealTimeService.java | Revised Asterisk's addUser query | <ide><path>eb/src/main/java/rtdc/web/server/service/AsteriskRealTimeService.java
<ide> }
<ide> Logger.getLogger(AsteriskRealTimeService.class.getName()).log(Level.INFO, "Adding user " + user.getUsername() + " to Asterisk with extension " + user.getId());
<ide> String sipQuery = "INSERT INTO sip_buddies (" +
<del> " NAME, defaultuser, callerid, secret, context, HOST, TYPE, allow) VALUES (" +
<add> " NAME, defaultuser, callerid, secret, context, HOST, TYPE) VALUES (" +
<ide> " ?," + " ?," + " ?," + " ?," +
<del> " 'users', 'dynamic', 'friend'," + // context, host, type
<del> " 'g729;ilbc;gsm;ulaw;alaw;vp8');"; // allow (should be updated with which codecs we want users to be able to use)
<add> " 'users', 'dynamic', 'friend');"; // context, host, type
<ide> String extensionQuery = "INSERT INTO extensions (context, exten, priority, app, appdata) VALUES (" +
<ide> " 'users', ?, 1, 'Dial', ?);";
<ide> try { |
|
JavaScript | apache-2.0 | 6fb4f8f40d76c9a74a57dc126389128bbf0ae804 | 0 | freedomjs/freedom-for-firefox,freedomjs/freedom-for-firefox | /*jslint node:true*/
/**
* Gruntfile for freedom-for-firefox.jsm
*
* This repository provides firefox (extension and os app)
* specific packaging of the freedom.js library.
*
* Here are the common tasks defined:
* build
* - Lint source and compile
* - (Default task)
* - Unit tests for sanity checking possible without actually launching firefox
* test
* - Build and run firefox extension for integration tests
**/
var freedomPrefix = require('path').dirname(require.resolve('freedom'));
module.exports = function (grunt) {
'use strict';
grunt.initConfig({
pkg: grunt.file.readJSON('package.json'),
jshint: {
providers: ['providers/*.js'],
options: {
'-W069': true,
'-W104': false
}
},
browserify: {
freedom: {
files: {
'freedom-for-firefox.jsm': ['src/entry.js']
},
options: {
postBundleCB: function (err, src, next) {
next(err, require('fs').readFileSync(
require.resolve('freedom/src/util/header.txt')
) + src);
}
}
},
jasmine: {
files: {
'spec.jsm': ['src/spec.js']
}
},
frame: {
files: {
'build/frame.js': require.resolve('freedom/src/util/frameEntry.js')
}
},
options: {
transform: [['folderify', {global: true}]],
alias: [
'./src/promise.js:es6-promise'
]
}
},
"build-test-addon": {
freedom: {
files: {
'.build': [ 'spec.jsm' ]
},
options: {
helper: [
{path: 'freedom-for-firefox.jsm', include: false},
{path: freedomPrefix + '/providers', name: 'providers', include: false},
{path: freedomPrefix + '/spec', name: 'spec', include: false}
]
}
}
},
bump: {
options: {
files: ['package.json'],
commit: true,
commitMessage: 'Release v%VERSION%',
commitFiles: ['package.json'],
createTag: true,
tagName: 'v%VERSION%',
tagMessage: 'Version %VERSION%',
push: true,
pushTo: 'origin'
}
},
clean: ['freedom-for-firefox.jsm', 'freedom.map', 'node_modules/',
'spec.jsm', '.build/'],
'npm-publish': {
options: {
// list of tasks that are required before publishing
requires: [],
// if the workspace is dirty, abort publishing (to avoid publishing local changes)
abortIfDirty: true
}
},
prompt: {
tagMessage: {
options: {
questions: [
{
config: 'bump.options.tagMessage',
type: 'input',
message: 'Enter a git tag message:',
default: 'v%VERSION%'
}
]
}
}
},
shell: {
options: {},
publishWebsite: {
command: 'bash tools/publishWebsite.sh'
}
},
copy: {
demo: {
src: 'build/frame.js',
dest: 'demo/tictak/data/freedom-frame.js'
}
}
});
grunt.loadNpmTasks('grunt-browserify');
grunt.loadNpmTasks('grunt-contrib-clean');
grunt.loadNpmTasks('grunt-contrib-copy');
grunt.loadNpmTasks('grunt-contrib-jshint');
grunt.loadNpmTasks('grunt-prompt');
grunt.loadNpmTasks('grunt-bump');
grunt.loadNpmTasks('grunt-npm');
grunt.loadNpmTasks('grunt-shell');
grunt.loadTasks('tasks');
grunt.registerTask('build', [
'jshint:providers',
'browserify:freedom'
]);
grunt.registerTask('test', [
'build',
'browserify:jasmine',
'integration'
]);
grunt.registerTask('release', function (arg) {
if (arguments.length === 0) {
arg = 'patch';
}
grunt.task.run([
'default',
'prompt:tagMessage',
'bump:' + arg,
'npm-publish',
'shell:publishWebsite'
]);
});
grunt.registerTask('default', ['build']);
};
| Gruntfile.js | /*jslint node:true*/
/**
* Gruntfile for freedom-for-firefox.jsm
*
* This repository provides firefox (extension and os app)
* specific packaging of the freedom.js library.
*
* Here are the common tasks defined:
* build
* - Lint source and compile
* - (Default task)
* - Unit tests for sanity checking possible without actually launching firefox
* test
* - Build and run firefox extension for integration tests
**/
var freedomPrefix = require('path').dirname(require.resolve('freedom'));
module.exports = function (grunt) {
'use strict';
grunt.initConfig({
pkg: grunt.file.readJSON('package.json'),
jshint: {
providers: ['providers/*.js'],
options: {
'-W069': true,
'-W104': false
}
},
browserify: {
freedom: {
files: {
'freedom-for-firefox.jsm': ['src/entry.js']
},
options: {
postBundleCB: function (err, src, next) {
next(err, require('fs').readFileSync(
require.resolve('freedom/src/util/header.txt')
) + src);
}
}
},
jasmine: {
files: {
'spec.jsm': ['src/spec.js']
}
},
frame: {
files: {
'build/frame.js': require.resolve('freedom/src/util/frameEntry.js')
}
},
options: {
transform: [['folderify', {global: true}]],
alias: [
'./src/promise.js:es6-promise'
]
}
},
"build-test-addon": {
freedom: {
files: {
'.build': [ 'spec.jsm' ]
},
options: {
helper: [
{path: 'freedom-for-firefox.jsm', include: false},
{path: freedomPrefix + '/providers', name: 'providers', include: false},
{path: freedomPrefix + '/spec', name: 'spec', include: false}
]
}
}
},
bump: {
options: {
files: ['package.json'],
commit: true,
commitMessage: 'Release v%VERSION%',
commitFiles: ['package.json'],
createTag: true,
tagName: 'v%VERSION%',
tagMessage: 'Version %VERSION%',
push: true,
pushTo: 'origin'
}
},
clean: ['freedom-for-firefox.jsm', 'freedom.map', 'node_modules/',
'spec.jsm', '.build/'],
'npm-publish': {
options: {
// list of tasks that are required before publishing
requires: [],
// if the workspace is dirty, abort publishing (to avoid publishing local changes)
abortIfDirty: true
}
},
prompt: {
tagMessage: {
options: {
questions: [
{
config: 'bump.options.tagMessage',
type: 'input',
message: 'Enter a git tag message:',
default: 'v%VERSION%'
}
]
}
}
},
shell: {
options: {},
publishWebsite: {
command: 'bash tools/publishWebsite.sh'
}
},
copy: {
demo: {
src: 'build/frame.js',
dest: 'demo/tictak/data/freedom-frame.js'
}
}
});
grunt.loadNpmTasks('grunt-browserify');
grunt.loadNpmTasks('grunt-contrib-clean');
grunt.loadNpmTasks('grunt-contrib-copy');
grunt.loadNpmTasks('grunt-contrib-jshint');
grunt.loadNpmTasks('grunt-prompt');
grunt.loadNpmTasks('grunt-bump');
grunt.loadNpmTasks('grunt-npm');
grunt.loadNpmTasks('grunt-shell');
grunt.loadTasks('tasks');
grunt.registerTask('build', [
'jshint:providers',
'browserify:freedom'
]);
grunt.registerTask('test', [
'build',
'browserify:jasmine',
'integration'
]);
grunt.registerTask('release', function (arg) {
if (arguments.length === 0) {
arg = 'patch';
}
grunt.task.run([
'default',
'prompt:tagMessage',
'bump:' + arg,
'npm-publish',
'shell:publishWebsite'
]);
});
grunt.registerTask('default', ['build']);
};
| spacing fix
| Gruntfile.js | spacing fix | <ide><path>runtfile.js
<ide> config: 'bump.options.tagMessage',
<ide> type: 'input',
<ide> message: 'Enter a git tag message:',
<del> default: 'v%VERSION%'
<add> default: 'v%VERSION%'
<ide> }
<ide> ]
<ide> } |
|
Java | mit | f7328c62767cf7d9e5f507b5acc7297ef8f8be2f | 0 | sorcix/sIRC | /*
* ClientState.java
*
* This file is part of the Sorcix Java IRC Library (sIRC).
*
* Copyright (C) 2008-2010 Vic Demuzere http://sorcix.com
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use, copy,
* modify, merge, publish, distribute, sublicense, and/or sell copies
* of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
* BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
* ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.sorcix.sirc;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
/**
* Contains information about an {@link IrcConnection}.
*
* @author Sorcix
* @since 1.1.0
*/
public final class ClientState {
// TODO: Allow changing the username (sIRC@..)
/** The list of channels. */
private final Map<String, Channel> channels;
/** The local user. */
private User client;
/**
* Creates a new ClientState.
*/
protected ClientState() {
this.channels = new HashMap<String, Channel>();
}
/**
* Adds a channel to the channel list.
*
* @param channel The channel to add.
*/
protected void addChannel(final Channel channel) {
		if (!this.channels.containsKey(channel.getName().toLowerCase())) {
this.channels.put(channel.getName().toLowerCase(), channel);
}
}
/**
* Retrieves a shared channel object from the channel list.
*
* @param channel A channel object representing this channel.
* @return The channel, or null if this channel doesn't exist.
* (The local user is not in that channel)
* @see #getChannel(String)
*/
protected Channel getChannel(final Channel channel) {
return this.getChannel(channel.getName());
}
/**
* Retrieves a shared channel object from the channel list.
*
* @param channel The channel name.
* @return The channel, or null if this channel doesn't exist.
* (The local user is not in that channel)
*/
protected Channel getChannel(final String channel) {
return this.channels.get(channel.toLowerCase());
}
/**
* Creates an iterator through all Channels.
*
* @return an iterator through all Channels.
*/
public Iterator<Channel> getChannels() {
return this.channels.values().iterator();
}
/**
* Retrieves the local {@link User}.
*
* @return The local {@code User}.
*/
public User getClient() {
return this.client;
}
/**
* Checks if given channel is in the channel list.
*
* @param name The name of this channel.
* @return True if the channel is in the list, false otherwise.
*/
protected boolean hasChannel(final String name) {
		return this.channels.containsKey(name.toLowerCase());
}
/**
* Remove all channels from the channel list.
*/
protected void removeAll() {
this.channels.clear();
}
/**
* Removes a channel from the channel list.
*
* @param channel The channel name.
*/
protected void removeChannel(final String channel) {
		if (this.channels.containsKey(channel.toLowerCase())) {
			this.channels.remove(channel.toLowerCase());
		}
}
/**
* Set the local {@link User}.
*
* @param user The local {@code User}.
*/
protected void setClient(final User user) {
this.client = user;
}
}
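// Editor's note: channel-map keys are stored lowercased, so lookups are
// case-insensitive, matching IRC's case-insensitive channel names.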
| src/main/java/com/sorcix/sirc/ClientState.java | /*
* ClientState.java
*
* This file is part of the Sorcix Java IRC Library (sIRC).
*
* Copyright (C) 2008-2010 Vic Demuzere http://sorcix.com
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use, copy,
* modify, merge, publish, distribute, sublicense, and/or sell copies
* of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
* BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
* ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.sorcix.sirc;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
/**
* Contains information about an {@link IrcConnection}.
*
* @author Sorcix
* @since 1.1.0
*/
public final class ClientState {
// TODO: Allow changing the username (sIRC@..)
/** The list of channels. */
private final Map<String, Channel> channels;
/** The local user. */
private User client;
/**
* Creates a new ClientState.
*/
protected ClientState() {
this.channels = new HashMap<String, Channel>();
}
/**
* Adds a channel to the channel list.
*
* @param channel The channel to add.
*/
protected void addChannel(final Channel channel) {
if (!this.channels.containsKey(channel.getName())) {
this.channels.put(channel.getName(), channel);
}
}
/**
* Retrieves a shared channel object from the channel list.
*
* @param channel A channel object representing this channel.
* @return The channel, or null if this channel doesn't exist.
* (The local user is not in that channel)
* @see #getChannel(String)
*/
protected Channel getChannel(final Channel channel) {
return this.getChannel(channel.getName());
}
/**
* Retrieves a shared channel object from the channel list.
*
* @param channel The channel name.
* @return The channel, or null if this channel doesn't exist.
* (The local user is not in that channel)
*/
protected Channel getChannel(final String channel) {
if (this.channels.containsKey(channel)) {
return this.channels.get(channel);
}
return null;
}
/**
* Creates an iterator through all Channels.
*
* @return an iterator through all Channels.
*/
public Iterator<Channel> getChannels() {
return this.channels.values().iterator();
}
/**
* Retrieves the local {@link User}.
*
* @return The local {@code User}.
*/
public User getClient() {
return this.client;
}
/**
* Checks if the given channel is in the channel list.
*
* @param name The name of this channel.
* @return True if the channel is in the list, false otherwise.
*/
protected boolean hasChannel(final String name) {
return this.channels.containsKey(name);
}
/**
* Remove all channels from the channel list.
*/
protected void removeAll() {
this.channels.clear();
}
/**
* Removes a channel from the channel list.
*
* @param channel The channel name.
*/
protected void removeChannel(final String channel) {
if (this.channels.containsKey(channel)) {
this.channels.remove(channel);
}
}
/**
* Set the local {@link User}.
*
* @param user The local {@code User}.
*/
protected void setClient(final User user) {
this.client = user;
}
} | ClientState.getChannel should be case-insensitive
| src/main/java/com/sorcix/sirc/ClientState.java | ClientState.getChannel should be case-insensitive | <ide><path>src/main/java/com/sorcix/sirc/ClientState.java
<ide> */
<ide> protected void addChannel(final Channel channel) {
<ide> if (!this.channels.containsKey(channel.getName())) {
<del> this.channels.put(channel.getName(), channel);
<add> this.channels.put(channel.getName().toLowerCase(), channel);
<ide> }
<ide> }
<ide>
<ide> * (The local user is not in that channel)
<ide> */
<ide> protected Channel getChannel(final String channel) {
<del> if (this.channels.containsKey(channel)) {
<del> return this.channels.get(channel);
<del> }
<del> return null;
<add> return this.channels.get(channel.toLowerCase());
<ide> }
<ide>
<ide> /** |
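The change above makes channel lookups case-insensitive by lowercasing the map key on both put and get, matching how IRC servers treat channel names (modulo server casemapping rules). Note that the committed addChannel still passes the unnormalized name to containsKey, so the duplicate check can miss an entry stored under its lowercased key. A minimal sketch of the fully normalized pattern follows; it is an illustration with hypothetical names, not part of sIRC:

import java.util.HashMap;
import java.util.Map;

final class ChannelMap<V> {
    private final Map<String, V> channels = new HashMap<String, V>();

    // Normalize in exactly one place so containsKey, put, get and remove agree.
    private static String key(final String name) {
        return name.toLowerCase();
    }

    boolean has(final String name) {
        return this.channels.containsKey(key(name));
    }

    void add(final String name, final V value) {
        if (!this.has(name)) {
            this.channels.put(key(name), value);
        }
    }

    V get(final String name) {
        return this.channels.get(key(name));
    }
}

With a single normalization point, get("#Foo") and get("#foo") resolve to the same entry regardless of how the channel was first added.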
|
JavaScript | mit | 0158f76484f83c73de42167d2277e1d5de6953de | 0 | cheung31/streamhub-gallery | define([
'streamhub-gallery/horizontal-list-view',
'text!streamhub-gallery/css/gallery-view.css',
'hgn!streamhub-gallery/css/theme.css',
'streamhub-sdk/util'
], function (HorizontalListView, GalleryViewCss, ThemeCssTemplate, util) {
var STYLE_EL,
GALLERY_THEME_STYLE_EL = $('<style></style>');
var GALLERY_CSS = {
contentBefore: {
transforms: {
translateX: '-9999px',
scale: 0.45
}
},
contentAfter: {
transforms: {
translateX: '9999px',
scale: 0.45
}
}
};
GALLERY_CSS.contentBefore1 = { opacity: 0.7 };
GALLERY_CSS.contentBefore2 = { opacity: 0.3 };
GALLERY_CSS.contentBefore3 = { opacity: 0.1 };
GALLERY_CSS.contentAfter1 = { opacity: 0.7 };
GALLERY_CSS.contentAfter2 = { opacity: 0.3 };
GALLERY_CSS.contentAfter3 = { opacity: 0.1 };
var GalleryView = function (opts) {
opts = opts || {};
opts.modal = opts.modal || true;
opts.aspectRatio = opts.aspectRatio || 16/9;
this._activeContentView = null;
HorizontalListView.call(this, opts);
this._id = this.galleryListViewClassName + '-' + new Date().getTime();
if (!STYLE_EL) {
STYLE_EL = $('<style></style>').text(GalleryViewCss).prependTo('head');
}
};
util.inherits(GalleryView, HorizontalListView);
GalleryView.prototype.galleryListViewClassName = 'streamhub-gallery-view';
GalleryView.prototype.setElement = function (el) {
this.el = document.createElement('div');
HorizontalListView.prototype.setElement.call(this, this.el);
$(this.el).appendTo(el);
var self = this;
this.$el.on('focusContent.hub', function (e) {
var contentEl = $(e.target).hasClass('content') ? e.target : $(e.target).closest('article.content')[0];
if ($(contentEl).parent().hasClass('content-before') || $(contentEl).parent().hasClass('content-after')) {
e.stopImmediatePropagation();
}
});
this.$el.on('click', '.content-before, .content-after', function (e) {
e.preventDefault();
e.stopPropagation();
var targetContentView;
for (var i=0; i < self.contentViews.length; i++) {
var contentEl = $(e.target).hasClass('content') ? e.target : $(e.target).closest('article.content')[0];
if (self.contentViews[i].el === contentEl) {
targetContentView = self.contentViews[i];
break;
}
}
var activeIndex = self.contentViews.indexOf(self._activeContentView);
var targetIndex = self.contentViews.indexOf(targetContentView);
if (targetIndex > activeIndex) {
self.next();
} else if (targetIndex < activeIndex) {
self.prev();
}
});
this.$el.on('imageLoaded.hub', function (e) {
self._adjustContentSize();
});
this.$el.addClass(this.galleryListViewClassName);
};
GalleryView.prototype._insert = function (contentView) {
var self = this,
newContentViewIndex,
$previousEl;
newContentViewIndex = this.contentViews.indexOf(contentView);
var $containerEl = $('<div class="' + this.contentContainerClassName + '"></div>');
contentView.$el.wrap($containerEl);
var $wrappedEl = contentView.$el.parent();
if (newContentViewIndex === 0) {
// Beginning!
$wrappedEl.prependTo(this.el);
} else {
// Find its previous contentView and insert the new contentView after it
$previousEl = this.contentViews[newContentViewIndex - 1].$el;
$wrappedEl.insertAfter($previousEl.parent('.'+this.contentContainerClassName));
}
this.focus();
};
GalleryView.prototype.next = function () {
this.$el.removeClass('animate');
var originalActiveContentView = this._activeContentView;
var activeIndex = this.contentViews.indexOf(this._activeContentView);
var targetContentView = this.contentViews[Math.min(activeIndex+1, this.contentViews.length-1)];
var newTransforms = $.extend(true, {}, this.focus({
translate: false,
contentView: targetContentView
}));
this.focus({
contentView: originalActiveContentView
});
var self = this;
setTimeout(function() {
self.$el.addClass('animate');
self.focus({
translate: newTransforms,
contentView: targetContentView
});
},1);
};
GalleryView.prototype.prev = function () {
this.$el.removeClass('animate');
var originalActiveContentView = this._activeContentView;
var activeIndex = this.contentViews.indexOf(this._activeContentView);
var targetContentView = this.contentViews[Math.max(activeIndex - 1, 0)]; // clamp at 0 so prev() from the first item stays in range
var newTransforms = $.extend(true, {}, this.focus({
translate: false,
contentView: targetContentView
}));
this.focus({
contentView: originalActiveContentView
});
var self = this;
setTimeout(function() {
self.$el.addClass('animate');
self.focus({
translate: newTransforms,
contentView: targetContentView
});
},1);
};
GalleryView.prototype.focus = function (opts) {
if (! this._activeContentView) {
this._activeContentView = this.contentViews[0];
}
opts = opts || {};
var contentContainerEls = this.$el.find('.content-container');
contentContainerEls.removeClass('content-active')
.removeClass('content-before-3')
.removeClass('content-before-2')
.removeClass('content-before-1')
.removeClass('content-after-3')
.removeClass('content-after-2')
.removeClass('content-after-1')
.removeClass('content-before')
.removeClass('content-after')
.removeAttr('style');
this._activeContentView = opts.contentView ? opts.contentView : this._activeContentView;
var activeIndex = this.contentViews.indexOf(this._activeContentView);
var targetContentEl = this.contentViews[activeIndex].$el;
var targetContainerEl = targetContentEl.parent();
targetContainerEl.addClass('content-active');
targetContainerEl.prevAll().addClass('content-before');
targetContainerEl.nextAll().addClass('content-after');
var before1 = targetContainerEl.prev().addClass('content-before-1');
var before2 = before1.prev().addClass('content-before-2');
var before3 = before2.prev().addClass('content-before-3');
var after1 = targetContainerEl.next().addClass('content-after-1');
var after2 = after1.next().addClass('content-after-2');
var after3 = after2.next().addClass('content-after-3');
return this._adjustContentSize(opts);
};
GalleryView.prototype._getContentSize = function () {
var containerHeight = this.$el.height();
var containerWidth = this.$el.width();
var contentWidth = Math.min(containerHeight * this._aspectRatio, containerWidth);
if (contentWidth == containerWidth) {
contentWidth = contentWidth * 0.8;
}
return { width: contentWidth, height: contentWidth / this._aspectRatio };
};
GalleryView.prototype._adjustContentSize = function (opts) {
var styleEl = $('style.'+this._id);
if (styleEl) {
styleEl.remove();
}
styleEl = $('<style class="'+this._id+'"></style>');
var styles = '';
var contentSize = this._getContentSize();
styles = '.'+this.horizontalListViewClassName + ' .'+this.contentContainerClassName + ' { width: ' + contentSize.width + 'px; height: ' + contentSize.height + 'px; margin-left: '+ contentSize.width/-2 + 'px; margin-top: '+ contentSize.height/-2+ 'px; }';
styleEl.html(styles);
$('head').append(styleEl);
// Make content with tiled attachments square except when there's a
// video attachment
var contentWithImageEls = this.$el.find('.content-with-image');
for (var i=0; i < contentWithImageEls.length; i++) {
var contentEl = contentWithImageEls.eq(i).closest('.content-container');
if (contentEl.find('.content-attachment-video').length) {
contentEl.find('.content, .content-attachment').css({
'padding-bottom': 1/this._aspectRatio * 100 + '%',
});
} else {
contentEl.css({
'width': contentSize.height + 'px',
'height': contentSize.height + 'px',
'margin-left': contentSize.height/-2 + 'px',
'margin-top': contentSize.height/-2 + 'px'
});
}
}
return this._adjustContentSpacing(opts);
};
GalleryView.prototype._adjustContentSpacing = function (opts) {
return this._slideshowSpacing(opts);
};
GalleryView.prototype._slideshowSpacing = function (opts) {
opts = opts || {};
var visibleAdjacentContent = 3;
if (opts.translate) {
GALLERY_CSS = opts.translate;
this._updateStyleEl(opts.translate);
return;
}
var adjacentContentEls = this.$el.find('.content-before, .content-after, .content-active');
if (!adjacentContentEls.length) {
return;
}
var beforeTranslateX = 0;
var afterTranslateX = 0;
for (var i=0; i < visibleAdjacentContent; i++) {
var adjacentIndex = i+1;
// Before
var contentBefore = adjacentContentEls.filter('.content-before-'+adjacentIndex);
var contentBeforeWidth;
if (contentBefore.length) {
GALLERY_CSS['contentBefore'+adjacentIndex].transforms = $.extend({}, GALLERY_CSS.contentBefore.transforms);
contentBeforeWidth = contentBefore[0].getBoundingClientRect().width;
var previousEl = contentBefore.next();
var previousWidth = previousEl[0].getBoundingClientRect().width;
beforeTranslateX = beforeTranslateX - previousWidth - (contentBeforeWidth - previousWidth)/2;
GALLERY_CSS['contentBefore'+adjacentIndex].transforms.translateX = beforeTranslateX+'px';
}
// After
var contentAfter = adjacentContentEls.filter('.content-after-'+adjacentIndex);
var contentAfterWidth;
if (contentAfter.length) {
GALLERY_CSS['contentAfter'+adjacentIndex].transforms = $.extend({}, GALLERY_CSS.contentAfter.transforms);
contentAfterWidth = contentAfter[0].getBoundingClientRect().width;
var previousEl = contentAfter.prev();
var previousWidth = previousEl[0].getBoundingClientRect().width;
afterTranslateX = afterTranslateX + previousWidth + (contentAfterWidth - previousWidth)/2
GALLERY_CSS['contentAfter'+adjacentIndex].transforms.translateX = afterTranslateX+'px';
}
}
this._updateStyleEl(opts.translate);
return GALLERY_CSS;
};
GalleryView.prototype._updateStyleEl = function (translate) {
translate = translate === undefined ? true : translate;
for (var style in GALLERY_CSS) {
var transform = '';
for (var t in GALLERY_CSS[style].transforms) {
if (translate || style == 'contentBefore' || style == 'contentAfter' || (!translate && t.indexOf('translate') == -1)) {
transform = transform + t + '(' + GALLERY_CSS[style].transforms[t] + ') ';
}
}
GALLERY_CSS[style].transform = transform;
}
var styleInnerHtml = ThemeCssTemplate(GALLERY_CSS);
var matches = styleInnerHtml.match(new RegExp("(\A|\})\s*(?![^ ~>|]*\.*\{)", 'g'));
for (var i=0; i < matches.length; i++) {
var idx = styleInnerHtml.indexOf(matches[i]);
styleInnerHtml = styleInnerHtml.slice(0, idx) +
this._id + styleInnerHtml.slice(idx);
}
GALLERY_THEME_STYLE_EL.remove();
GALLERY_THEME_STYLE_EL = $('<style></style>').text(styleInnerHtml).appendTo('head');
};
return GalleryView;
});
| src/main.js | define([
'streamhub-gallery/horizontal-list-view',
'text!streamhub-gallery/css/gallery-view.css',
'hgn!streamhub-gallery/css/theme.css',
'streamhub-sdk/util'
], function (HorizontalListView, GalleryViewCss, ThemeCssTemplate, util) {
var STYLE_EL,
GALLERY_THEME_STYLE_EL = $('<style></style>');
var GALLERY_CSS = {
contentBefore: {
transforms: {
translateX: '-9999px',
scale: 0.45
}
},
contentAfter: {
transforms: {
translateX: '9999px',
scale: 0.45
}
}
};
GALLERY_CSS.contentBefore1 = { opacity: 0.7 };
GALLERY_CSS.contentBefore2 = { opacity: 0.3 };
GALLERY_CSS.contentBefore3 = { opacity: 0.1 };
GALLERY_CSS.contentAfter1 = { opacity: 0.7 };
GALLERY_CSS.contentAfter2 = { opacity: 0.3 };
GALLERY_CSS.contentAfter3 = { opacity: 0.1 };
var GalleryView = function (opts) {
opts = opts || {};
opts.modal = opts.modal || true;
opts.aspectRatio = opts.aspectRatio || 16/9;
this._fullscreen = opts.fullscreen || false;
this._activeContentView = null;
HorizontalListView.call(this, opts);
this._id = this.galleryListViewClassName + '-' + new Date().getTime();
if (!STYLE_EL) {
STYLE_EL = $('<style></style>').text(GalleryViewCss).prependTo('head');
}
};
util.inherits(GalleryView, HorizontalListView);
GalleryView.prototype.galleryListViewClassName = 'streamhub-gallery-view';
GalleryView.prototype.setElement = function (el) {
this.el = document.createElement('div');
HorizontalListView.prototype.setElement.call(this, this.el);
$(this.el).appendTo(el);
var self = this;
this.$el.on('focusContent.hub', function (e) {
var contentEl = $(e.target).hasClass('content') ? e.target : $(e.target).closest('article.content')[0];
if ($(contentEl).parent().hasClass('content-before') || $(contentEl).parent().hasClass('content-after')) {
e.stopImmediatePropagation();
}
});
this.$el.on('click', '.content-before, .content-after', function (e) {
e.preventDefault();
e.stopPropagation();
var targetContentView;
for (var i=0; i < self.contentViews.length; i++) {
var contentEl = $(e.target).hasClass('content') ? e.target : $(e.target).closest('article.content')[0];
if (self.contentViews[i].el === contentEl) {
targetContentView = self.contentViews[i];
break;
}
}
var activeIndex = self.contentViews.indexOf(self._activeContentView);
var targetIndex = self.contentViews.indexOf(targetContentView);
if (targetIndex > activeIndex) {
self.next();
} else if (targetIndex < activeIndex) {
self.prev();
}
});
this.$el.on('imageLoaded.hub', function (e) {
self._adjustContentSize();
});
this.$el.addClass(this.galleryListViewClassName);
};
GalleryView.prototype._insert = function (contentView) {
var self = this,
newContentViewIndex,
$previousEl;
newContentViewIndex = this.contentViews.indexOf(contentView);
var $containerEl = $('<div class="' + this.contentContainerClassName + '"></div>');
contentView.$el.wrap($containerEl);
var $wrappedEl = contentView.$el.parent();
if (newContentViewIndex === 0) {
// Beginning!
$wrappedEl.prependTo(this.el);
} else {
// Find its previous contentView and insert the new contentView after it
$previousEl = this.contentViews[newContentViewIndex - 1].$el;
$wrappedEl.insertAfter($previousEl.parent('.'+this.contentContainerClassName));
}
this.focus();
};
GalleryView.prototype.fullscreen = function (off) {
return;
var contentSize = this._getContentSize();
off || off === undefined ? this._fullscreenSpacing(contentSize.width): this._slideshowSpacing(contentSize.width);
this._fullscreen = off === undefined ? true : !!off;
};
GalleryView.prototype.next = function () {
this.$el.removeClass('animate');
var originalActiveContentView = this._activeContentView;
var activeIndex = this.contentViews.indexOf(this._activeContentView);
var targetContentView = this.contentViews[Math.min(activeIndex+1, this.contentViews.length-1)];
var newTransforms = $.extend(true, {}, this.focus({
translate: false,
contentView: targetContentView
}));
this.focus({
contentView: originalActiveContentView
});
var self = this;
setTimeout(function() {
self.$el.addClass('animate');
self.focus({
translate: newTransforms,
contentView: targetContentView
});
},1);
};
GalleryView.prototype.prev = function () {
this.$el.removeClass('animate');
var originalActiveContentView = this._activeContentView;
var activeIndex = this.contentViews.indexOf(this._activeContentView);
var targetContentView = this.contentViews[Math.max(activeIndex - 1, 0)]; // clamp at 0 so prev() from the first item stays in range
var newTransforms = $.extend(true, {}, this.focus({
translate: false,
contentView: targetContentView
}));
this.focus({
contentView: originalActiveContentView
});
var self = this;
setTimeout(function() {
self.$el.addClass('animate');
self.focus({
translate: newTransforms,
contentView: targetContentView
});
},1);
};
GalleryView.prototype.focus = function (opts) {
if (! this._activeContentView) {
this._activeContentView = this.contentViews[0];
}
opts = opts || {};
var contentContainerEls = this.$el.find('.content-container');
contentContainerEls.removeClass('content-active')
.removeClass('content-before-3')
.removeClass('content-before-2')
.removeClass('content-before-1')
.removeClass('content-after-3')
.removeClass('content-after-2')
.removeClass('content-after-1')
.removeClass('content-before')
.removeClass('content-after')
.removeAttr('style');
this._activeContentView = opts.contentView ? opts.contentView : this._activeContentView;
var activeIndex = this.contentViews.indexOf(this._activeContentView);
var targetContentEl = this.contentViews[activeIndex].$el;
var targetContainerEl = targetContentEl.parent();
targetContainerEl.addClass('content-active');
targetContainerEl.prevAll().addClass('content-before');
targetContainerEl.nextAll().addClass('content-after');
var before1 = targetContainerEl.prev().addClass('content-before-1');
var before2 = before1.prev().addClass('content-before-2');
var before3 = before2.prev().addClass('content-before-3');
var after1 = targetContainerEl.next().addClass('content-after-1');
var after2 = after1.next().addClass('content-after-2');
var after3 = after2.next().addClass('content-after-3');
return this._adjustContentSize(opts);
};
GalleryView.prototype._getContentSize = function () {
var containerHeight = this.$el.height();
var containerWidth = this.$el.width();
var contentWidth = Math.min(containerHeight * this._aspectRatio, containerWidth);
if (contentWidth == containerWidth) {
contentWidth = contentWidth * 0.8;
}
return { width: contentWidth, height: contentWidth / this._aspectRatio };
};
GalleryView.prototype._adjustContentSize = function (opts) {
var styleEl = $('style.'+this._id);
if (styleEl) {
styleEl.remove();
}
styleEl = $('<style class="'+this._id+'"></style>');
var styles = '';
var contentSize = this._getContentSize();
styles = '.'+this.horizontalListViewClassName + ' .'+this.contentContainerClassName + ' { width: ' + contentSize.width + 'px; height: ' + contentSize.height + 'px; margin-left: '+ contentSize.width/-2 + 'px; margin-top: '+ contentSize.height/-2+ 'px; }';
styleEl.html(styles);
$('head').append(styleEl);
// Make content with tiled attachments square except when there's a
// video attachment
var contentWithImageEls = this.$el.find('.content-with-image');
for (var i=0; i < contentWithImageEls.length; i++) {
var contentEl = contentWithImageEls.eq(i).closest('.content-container');
if (contentEl.find('.content-attachment-video').length) {
contentEl.find('.content, .content-attachment').css({
'padding-bottom': 1/this._aspectRatio * 100 + '%',
});
} else {
contentEl.css({
'width': contentSize.height + 'px',
'height': contentSize.height + 'px',
'margin-left': contentSize.height/-2 + 'px',
'margin-top': contentSize.height/-2 + 'px'
});
}
}
return this._adjustContentSpacing(opts);
};
GalleryView.prototype._adjustContentSpacing = function (opts) {
return this._fullscreen ? this._fullscreenSpacing(opts) : this._slideshowSpacing(opts);
};
GalleryView.prototype._slideshowSpacing = function (opts) {
opts = opts || {};
var visibleAdjacentContent = 3;
if (opts.translate) {
GALLERY_CSS = opts.translate;
this._updateStyleEl(opts.translate);
return;
}
var adjacentContentEls = this.$el.find('.content-before, .content-after, .content-active');
if (!adjacentContentEls.length) {
return;
}
var beforeTranslateX = 0;
var afterTranslateX = 0;
for (var i=0; i < visibleAdjacentContent; i++) {
var adjacentIndex = i+1;
// Before
var contentBefore = adjacentContentEls.filter('.content-before-'+adjacentIndex);
var contentBeforeWidth;
if (contentBefore.length) {
GALLERY_CSS['contentBefore'+adjacentIndex].transforms = $.extend({}, GALLERY_CSS.contentBefore.transforms);
contentBeforeWidth = contentBefore[0].getBoundingClientRect().width;
var previousEl = contentBefore.next();
var previousWidth = previousEl[0].getBoundingClientRect().width;
beforeTranslateX = beforeTranslateX - previousWidth - (contentBeforeWidth - previousWidth)/2;
GALLERY_CSS['contentBefore'+adjacentIndex].transforms.translateX = beforeTranslateX+'px';
}
// After
var contentAfter = adjacentContentEls.filter('.content-after-'+adjacentIndex);
var contentAfterWidth;
if (contentAfter.length) {
GALLERY_CSS['contentAfter'+adjacentIndex].transforms = $.extend({}, GALLERY_CSS.contentAfter.transforms);
contentAfterWidth = contentAfter[0].getBoundingClientRect().width;
var previousEl = contentAfter.prev();
var previousWidth = previousEl[0].getBoundingClientRect().width;
afterTranslateX = afterTranslateX + previousWidth + (contentAfterWidth - previousWidth)/2
GALLERY_CSS['contentAfter'+adjacentIndex].transforms.translateX = afterTranslateX+'px';
}
}
this._updateStyleEl(opts.translate);
return GALLERY_CSS;
};
GalleryView.prototype._updateStyleEl = function (translate) {
translate = translate === undefined ? true : translate;
for (var style in GALLERY_CSS) {
var transform = '';
for (var t in GALLERY_CSS[style].transforms) {
if (translate || style == 'contentBefore' || style == 'contentAfter' || (!translate && t.indexOf('translate') == -1)) {
transform = transform + t + '(' + GALLERY_CSS[style].transforms[t] + ') ';
}
}
GALLERY_CSS[style].transform = transform;
}
var styleInnerHtml = ThemeCssTemplate(GALLERY_CSS);
var matches = styleInnerHtml.match(new RegExp("(\A|\})\s*(?![^ ~>|]*\.*\{)", 'g'));
for (var i=0; i < matches.length; i++) {
var idx = styleInnerHtml.indexOf(matches[i]);
styleInnerHtml = styleInnerHtml.slice(0, idx) +
this._id + styleInnerHtml.slice(idx);
}
GALLERY_THEME_STYLE_EL.remove();
GALLERY_THEME_STYLE_EL = $('<style></style>').text(styleInnerHtml).appendTo('head');
};
return GalleryView;
});
| Remove fullscreen functionality
| src/main.js | Remove fullscreen functionality | <ide><path>src/main.js
<ide> opts.modal = opts.modal || true;
<ide> opts.aspectRatio = opts.aspectRatio || 16/9;
<ide>
<del> this._fullscreen = opts.fullscreen || false;
<ide> this._activeContentView = null;
<ide> HorizontalListView.call(this, opts);
<ide>
<ide> }
<ide>
<ide> this.focus();
<del> };
<del>
<del> GalleryView.prototype.fullscreen = function (off) {
<del> return;
<del> var contentSize = this._getContentSize();
<del> off || off === undefined ? this._fullscreenSpacing(contentSize.width): this._slideshowSpacing(contentSize.width);
<del> this._fullscreen = off === undefined ? true : !!off;
<ide> };
<ide>
<ide> GalleryView.prototype.next = function () {
<ide> };
<ide>
<ide> GalleryView.prototype._adjustContentSpacing = function (opts) {
<del> return this._fullscreen ? this._fullscreenSpacing(opts) : this._slideshowSpacing(opts);
<add> return this._slideshowSpacing(opts);
<ide> };
<ide>
<ide> GalleryView.prototype._slideshowSpacing = function (opts) { |
|
Java | mit | 69947e03ec8a39a1b7622737edeed1a74a97b9b0 | 0 | romank0/dddsample-core,gacalves/dddsample-core,stefan-ka/dddsample-core,citerus/dddsample-core,IzzyXie2010/dddsample-core,citerus/dddsample-core,stefan-ka/dddsample-core,orende/dddsample-core,loothingpogixxv/dddsample-core,loothingpogixxv/dddsample-core,IzzyXie2010/dddsample-core,romank0/dddsample-core,gacalves/dddsample-core,orende/dddsample-core | package se.citerus.dddsample.domain.model.cargo;
import org.apache.commons.lang.Validate;
import se.citerus.dddsample.domain.model.handling.HandlingEvent;
import se.citerus.dddsample.domain.model.location.Location;
import se.citerus.dddsample.domain.shared.ValueObject;
import java.util.Collections;
import java.util.Date;
import java.util.List;
/**
* An itinerary.
*
*/
public class Itinerary implements ValueObject<Itinerary> {
private List<Leg> legs = Collections.emptyList();
static final Itinerary EMPTY_ITINERARY = new Itinerary();
private static final Date END_OF_DAYS = new Date(Long.MAX_VALUE);
/**
* Constructor.
*
* @param legs List of legs for this itinerary.
*/
public Itinerary(final List<Leg> legs) {
Validate.notEmpty(legs);
Validate.noNullElements(legs);
this.legs = legs;
}
/**
* @return the legs of this itinerary, as an <b>immutable</b> list.
*/
public List<Leg> legs() {
return Collections.unmodifiableList(legs);
}
/**
* Test if the given handling event is expected when executing this itinerary.
*
* @param event Event to test.
* @return <code>true</code> if the event is expected
*/
public boolean isExpected(final HandlingEvent event) {
if (legs.isEmpty()) {
return false;
}
if (event.type() == HandlingEvent.Type.RECEIVE) {
//Check that the first leg's origin is the event's location
final Leg leg = legs.get(0);
return (leg.loadLocation().equals(event.location()));
}
if (event.type() == HandlingEvent.Type.LOAD) {
//Check that there is one leg with the same load location and voyage
for (Leg leg : legs) {
if (leg.loadLocation().sameIdentityAs(event.location()) &&
leg.voyage().sameIdentityAs(event.voyage()))
return true;
}
return false;
}
if (event.type() == HandlingEvent.Type.UNLOAD) {
//Check that there is one leg with the same unload location and voyage
for (Leg leg : legs) {
if (leg.unloadLocation().equals(event.location()) &&
leg.voyage().equals(event.voyage()))
return true;
}
return false;
}
if (event.type() == HandlingEvent.Type.CLAIM) {
//Check that the last leg's destination is the event's location
final Leg leg = lastLeg();
return (leg.unloadLocation().equals(event.location()));
}
//HandlingEvent.Type.CUSTOMS;
return true;
}
/**
* @return The initial departure location.
*/
Location initialDepartureLocation() {
if (legs.isEmpty()) {
return Location.UNKNOWN;
} else {
return legs.get(0).loadLocation();
}
}
/**
* @return The final arrival location.
*/
Location finalArrivalLocation() {
if (legs.isEmpty()) {
return Location.UNKNOWN;
} else {
return lastLeg().unloadLocation();
}
}
/**
* @return Date when cargo arrives at final destination.
*/
Date finalArrivalDate() {
final Leg lastLeg = lastLeg();
if (lastLeg == null) {
return new Date(END_OF_DAYS.getTime());
} else {
return new Date(lastLeg.unloadTime().getTime());
}
}
/**
* @return The last leg on the itinerary.
*/
Leg lastLeg() {
if (legs.isEmpty()) {
return null;
} else {
return legs.get(legs.size() - 1);
}
}
/**
* @return The first leg on the itinerary.
*/
Leg firstLeg() {
if (legs.isEmpty()) {
return null;
} else {
return legs.get(0);
}
}
/**
* @param other itinerary to compare
* @return <code>true</code> if the legs in this and the other itinerary are all equal.
*/
@Override
public boolean sameValueAs(final Itinerary other) {
return other != null && legs.equals(other.legs);
}
@Override
public boolean equals(final Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
final Itinerary itinerary = (Itinerary) o;
return sameValueAs(itinerary);
}
@Override
public int hashCode() {
return legs.hashCode();
}
Itinerary() {
// Needed by Hibernate
}
// Auto-generated surrogate key
private Long id;
}
| dddsample/src/main/java/se/citerus/dddsample/domain/model/cargo/Itinerary.java | package se.citerus.dddsample.domain.model.cargo;
import org.apache.commons.lang.Validate;
import se.citerus.dddsample.domain.model.handling.HandlingEvent;
import se.citerus.dddsample.domain.model.location.Location;
import se.citerus.dddsample.domain.shared.ValueObject;
import java.util.Collections;
import java.util.Date;
import java.util.List;
/**
* An itinerary.
*
*/
public class Itinerary implements ValueObject<Itinerary> {
private List<Leg> legs = Collections.emptyList();
static final Itinerary EMPTY_ITINERARY = new Itinerary();
private static final Date END_OF_DAYS = new Date(Long.MAX_VALUE);
/**
* Constructor.
*
* @param legs List of legs for this itinerary.
*/
public Itinerary(final List<Leg> legs) {
Validate.notEmpty(legs);
Validate.noNullElements(legs);
this.legs = legs;
}
/**
* @return the legs of this itinerary, as an <b>immutable</b> list.
*/
public List<Leg> legs() {
return Collections.unmodifiableList(legs);
}
/**
* Test if the given handling event is expected when executing this itinerary.
*
* @param event Event to test.
* @return <code>true</code> if the event is expected
*/
public boolean isExpected(final HandlingEvent event) {
if (legs.isEmpty()) {
return false;
}
if (event.type() == HandlingEvent.Type.RECEIVE) {
//Check that the first leg's origin is the event's location
final Leg leg = legs.get(0);
return (leg.loadLocation().equals(event.location()));
}
if (event.type() == HandlingEvent.Type.LOAD) {
//Check that there is one leg with the same load location and voyage
for (Leg leg : legs) {
if (leg.loadLocation().sameIdentityAs(event.location()) &&
leg.voyage().sameIdentityAs(event.voyage()))
return true;
}
return false;
}
if (event.type() == HandlingEvent.Type.UNLOAD) {
//Check that there is one leg with the same unload location and voyage
for (Leg leg : legs) {
if (leg.unloadLocation().equals(event.location()) &&
leg.voyage().equals(event.voyage()))
return true;
}
return false;
}
if (event.type() == HandlingEvent.Type.CLAIM) {
//Check that the last leg's destination is the event's location
final Leg leg = lastLeg();
return (leg.unloadLocation().equals(event.location()));
}
//HandlingEvent.Type.CUSTOMS;
return true;
}
/**
* @return The initial departure location.
*/
Location initialDepartureLocation() {
if (legs.isEmpty()) {
return Location.UNKNOWN;
} else {
return legs.get(0).loadLocation();
}
}
/**
* @return The final arrival location.
*/
Location finalArrivalLocation() {
if (legs.isEmpty()) {
return Location.UNKNOWN;
} else {
return lastLeg().unloadLocation();
}
}
/**
* @return Date when cargo arrives at final destination.
*/
Date finalArrivalDate() {
final Leg lastLeg = lastLeg();
if (lastLeg == null) {
return new Date(END_OF_DAYS.getTime());
} else {
return new Date(lastLeg.unloadTime().getTime());
}
}
/**
* @return The last leg on the itinerary.
*/
Leg lastLeg() {
if (legs.isEmpty()) {
return null;
} else {
return legs.get(legs.size() - 1);
}
}
/**
* @param other itinerary to compare
* @return <code>true</code> if the legs in this and the other itinerary are all equal.
*/
@Override
public boolean sameValueAs(final Itinerary other) {
return other != null && legs.equals(other.legs);
}
@Override
public boolean equals(final Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
final Itinerary itinerary = (Itinerary) o;
return sameValueAs(itinerary);
}
@Override
public int hashCode() {
return legs.hashCode();
}
Itinerary() {
// Needed by Hibernate
}
// Auto-generated surrogate key
private Long id;
}
| Added a firstLeg() method | dddsample/src/main/java/se/citerus/dddsample/domain/model/cargo/Itinerary.java | Added a firstLeg() method | <ide><path>ddsample/src/main/java/se/citerus/dddsample/domain/model/cargo/Itinerary.java
<ide> }
<ide>
<ide> /**
<add> * @return The first leg on the itinerary.
<add> */
<add> Leg firstLeg() {
<add> if (legs.isEmpty()) {
<add> return null;
<add> } else {
<add> return legs.get(0);
<add> }
<add> }
<add>
<add> /**
<ide> * @param other itinerary to compare
<ide> * @return <code>true</code> if the legs in this and the other itinerary are all equal.
<ide> */ |
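The added firstLeg() mirrors lastLeg(): both return null for an empty itinerary instead of throwing. A minimal package-local usage sketch (a hypothetical helper, not repository code; Leg, Location and Location.UNKNOWN are used exactly as elsewhere in this class):

// Hypothetical caller in the same package, illustrating the null contract.
static Location originOf(final Itinerary itinerary) {
    final Leg first = itinerary.firstLeg();
    // firstLeg() returns null when the itinerary has no legs.
    return first == null ? Location.UNKNOWN : first.loadLocation();
}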
|
Java | mit | 8272deb5398f4ea2876433f2fe26d01c136f63b4 | 0 | gamax92/OC-Example-Architecture | package li.cil.oc.example.tileentity;
import cpw.mods.fml.common.Mod;
import cpw.mods.fml.common.event.FMLPreInitializationEvent;
import cpw.mods.fml.common.registry.GameRegistry;
/**
* This mod demonstrates how to create tile entities that are treated as
* components by OpenComputers, i.e. tile entities that provide methods which
* can be called from a program running on a computer.
* <p/>
* The mod tries to keep everything else to a minimum, to focus on the mod-
* specific parts. It is not intended for use or distribution, but you're free
* to base a proper addon on this code.
*/
@Mod(modid = "OpenComputers|ExampleTileEntity",
name = "OpenComputers Addon Example - TileEntity",
version = "1.0.0",
dependencies = "required-after:OpenComputers@[1.2.0,)")
public class ModExampleTileEntity {
@Mod.Instance
public static ModExampleTileEntity instance;
public static BlockRadar radar;
public static BlockSimpleRadar simpleRadar;
@Mod.EventHandler
public void preInit(FMLPreInitializationEvent e) {
radar = new BlockRadar(3660);
GameRegistry.registerBlock(radar, "oc:example_radar");
GameRegistry.registerTileEntity(TileEntityRadar.class, "oc:example_radar");
simpleRadar = new BlockSimpleRadar(3661);
GameRegistry.registerBlock(simpleRadar, "oc:example_simple_radar");
GameRegistry.registerTileEntity(TileEntitySimpleRadar.class, "oc:example_simple_radar");
}
}
| src/li/cil/oc/example/tileentity/ModExampleTileEntity.java | package li.cil.oc.example.tileentity;
import cpw.mods.fml.common.Mod;
import cpw.mods.fml.common.event.FMLPreInitializationEvent;
import cpw.mods.fml.common.registry.GameRegistry;
/**
* This mod demonstrates how to create tile entities that are treated as
* components by OpenComputers, i.e. tile entities that provide methods which
* can be called from a program running on a computer.
* <p/>
* The mod tries to keep everything else to a minimum, to focus on the mod-
* specific parts. It is not intended for use or distribution, but you're free
* to base a proper addon on this code.
*/
@Mod(modid = "OpenComputers|ExampleTileEntity",
name = "OpenComputers Addon Example - TileEntity",
version = "1.0.0",
dependencies = "required-after:OpenComputers@[1.2.0,)")
public class ModExampleTileEntity {
@Mod.Instance
public static ModExampleTileEntity instance;
public static BlockRadar radar;
@Mod.EventHandler
public void preInit(FMLPreInitializationEvent e) {
radar = new BlockRadar(3660);
GameRegistry.registerBlock(radar, "oc:example_radar");
GameRegistry.registerTileEntity(TileEntityRadar.class, "oc:example_radar");
}
}
| added example code for the new SimpleComponent interface
| src/li/cil/oc/example/tileentity/ModExampleTileEntity.java | added example code for the new SimpleComponent interface | <ide><path>src/li/cil/oc/example/tileentity/ModExampleTileEntity.java
<ide> public static ModExampleTileEntity instance;
<ide>
<ide> public static BlockRadar radar;
<add> public static BlockSimpleRadar simpleRadar;
<ide>
<ide> @Mod.EventHandler
<ide> public void preInit(FMLPreInitializationEvent e) {
<ide> radar = new BlockRadar(3660);
<ide> GameRegistry.registerBlock(radar, "oc:example_radar");
<ide> GameRegistry.registerTileEntity(TileEntityRadar.class, "oc:example_radar");
<add>
<add> simpleRadar = new BlockSimpleRadar(3661);
<add> GameRegistry.registerBlock(simpleRadar, "oc:example_simple_radar");
<add> GameRegistry.registerTileEntity(TileEntitySimpleRadar.class, "oc:example_simple_radar");
<ide> }
<ide> } |
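The new block registers TileEntitySimpleRadar, which the commit message says demonstrates OpenComputers' SimpleComponent interface. That class is not part of this file; the sketch below is an assumption about its typical shape, not the repository's code — in particular, the callback-related classes (Callback, Context, Arguments) lived in different packages across OpenComputers versions:

// Hedged sketch only; exact imports and the component name are assumptions.
public class TileEntitySimpleRadar extends net.minecraft.tileentity.TileEntity
        implements li.cil.oc.api.network.SimpleComponent {
    @Override
    public String getComponentName() {
        // The name under which the component is visible to programs.
        return "simple_radar";
    }
    // Methods exposed to programs would additionally be annotated with
    // OpenComputers' @Callback annotation.
}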
|
Java | agpl-3.0 | 58514ac2c887dae257721d90cda9877d2df426ff | 0 | paulmartel/voltdb,VoltDB/voltdb,migue/voltdb,paulmartel/voltdb,deerwalk/voltdb,VoltDB/voltdb,VoltDB/voltdb,deerwalk/voltdb,simonzhangsm/voltdb,migue/voltdb,simonzhangsm/voltdb,deerwalk/voltdb,VoltDB/voltdb,deerwalk/voltdb,paulmartel/voltdb,deerwalk/voltdb,paulmartel/voltdb,VoltDB/voltdb,migue/voltdb,simonzhangsm/voltdb,migue/voltdb,paulmartel/voltdb,simonzhangsm/voltdb,deerwalk/voltdb,simonzhangsm/voltdb,paulmartel/voltdb,simonzhangsm/voltdb,VoltDB/voltdb,paulmartel/voltdb,simonzhangsm/voltdb,deerwalk/voltdb,migue/voltdb,migue/voltdb,simonzhangsm/voltdb,VoltDB/voltdb,migue/voltdb,migue/voltdb,deerwalk/voltdb,paulmartel/voltdb | /* This file is part of VoltDB.
* Copyright (C) 2008-2015 VoltDB Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.voltdb.iv2;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertFalse;
import static junit.framework.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import org.junit.BeforeClass;
import org.junit.Test;
import org.voltcore.messaging.TransactionInfoBaseMessage;
import org.voltcore.messaging.VoltMessage;
import org.voltdb.ParameterSet;
import org.voltdb.StoredProcedureInvocation;
import org.voltdb.TheHashinator;
import org.voltdb.TheHashinator.HashinatorType;
import org.voltdb.messaging.CompleteTransactionMessage;
import org.voltdb.messaging.FragmentTaskMessage;
import org.voltdb.messaging.InitiateTaskMessage;
import org.voltdb.messaging.Iv2InitiateTaskMessage;
import org.voltdb.messaging.Iv2RepairLogResponseMessage;
public class TestRepairLog
{
VoltMessage truncInitMsg(long truncPt, long handle)
{
Iv2InitiateTaskMessage msg = mock(Iv2InitiateTaskMessage.class);
when(msg.getTruncationHandle()).thenReturn(truncPt);
when(msg.getSpHandle()).thenReturn(handle);
return msg;
}
VoltMessage nonTruncInitMsg()
{
return truncInitMsg(Long.MIN_VALUE, 0);
}
VoltMessage truncFragMsg(long truncPt, long mpTxnId)
{
FragmentTaskMessage msg = mock(FragmentTaskMessage.class);
when(msg.getTxnId()).thenReturn(mpTxnId);
when(msg.getTruncationHandle()).thenReturn(truncPt);
return msg;
}
VoltMessage truncCompleteMsg(long truncPt, long mpTxnId)
{
CompleteTransactionMessage msg = mock(CompleteTransactionMessage.class);
when(msg.getTxnId()).thenReturn(mpTxnId);
when(msg.getTruncationHandle()).thenReturn(truncPt);
return msg;
}
// a message that should never be logged.
private static class FooMessage extends VoltMessage
{
@Override
protected void initFromBuffer(ByteBuffer buf) throws IOException {
}
@Override
public void flattenToBuffer(ByteBuffer buf) throws IOException {
}
}
@BeforeClass
static public void initializeHashinator() {
TheHashinator.setConfiguredHashinatorType(HashinatorType.ELASTIC);
TheHashinator.initialize(TheHashinator.getConfiguredHashinatorClass(), TheHashinator.getConfigureBytes(8));
}
@Test
public void testOffer()
{
// offer various messages to the log and check
// that it keeps the expected ones.
RepairLog rl = new RepairLog();
VoltMessage m1 = nonTruncInitMsg();
VoltMessage m2 = nonTruncInitMsg();
rl.deliver(m1);
rl.deliver(m2);
List<Iv2RepairLogResponseMessage> contents = rl.contents(1l, false);
assertEquals(3, contents.size());
assertEquals(m1, contents.get(1).getPayload());
assertEquals(m2, contents.get(2).getPayload());
}
@Test
public void testOfferWithTruncation()
{
RepairLog rl = new RepairLog();
// add m1
VoltMessage m1 = truncInitMsg(0L, 1L);
rl.deliver(m1);
assertEquals(2, rl.contents(1L, false).size());
// add m2
VoltMessage m2 = truncInitMsg(0L, 2L);
rl.deliver(m2);
assertEquals(3, rl.contents(1L, false).size());
// trim m1. add m3
VoltMessage m3 = truncInitMsg(1L, 3L);
rl.deliver(m3);
assertEquals(3, rl.contents(1L, false).size());
assertEquals(m2, rl.contents(1L, false).get(1).getPayload());
assertEquals(2L, rl.contents(1L, false).get(1).getHandle());
assertEquals(m3, rl.contents(1L, false).get(2).getPayload());
assertEquals(3L, rl.contents(1L, false).get(2).getHandle());
}
@Test
public void testOfferUnneededMessage()
{
RepairLog rl = new RepairLog();
VoltMessage m1 = truncInitMsg(0L, 1L);
rl.deliver(m1);
// deliver a non-logged message (this is the test).
rl.deliver(new FooMessage());
VoltMessage m2 = truncInitMsg(0L, 2L);
rl.deliver(m2);
assertEquals(3, rl.contents(1L, false).size());
assertEquals(m1, rl.contents(1L, false).get(1).getPayload());
assertEquals(m2, rl.contents(1L, false).get(2).getPayload());
}
@Test
public void testOfferFragmentTaskMessage()
{
RepairLog rl = new RepairLog();
// trunc(trunc point, txnId).
VoltMessage m1 = truncFragMsg(0L, 1L);
rl.deliver(m1);
assertEquals(2, rl.contents(1L, false).size());
VoltMessage m2 = truncFragMsg(0L, 2L);
rl.deliver(m2);
assertEquals(3, rl.contents(1L, false).size());
// only the first message for a transaction is logged.
VoltMessage m2b = truncFragMsg(0L, 2L);
rl.deliver(m2b);
assertEquals(3, rl.contents(1L, false).size());
// trim m1. add m3
VoltMessage m3 = truncFragMsg(1L, 3L);
rl.deliver(m3);
assertEquals(3, rl.contents(1L, false).size());
assertEquals(m2, rl.contents(1L, false).get(1).getPayload());
assertEquals(2L, rl.contents(1L, false).get(1).getTxnId());
assertEquals(m3, rl.contents(1L, false).get(2).getPayload());
assertEquals(3L, rl.contents(1L, false).get(2).getTxnId());
}
@Test
public void testOfferCompleteMessage()
{
RepairLog rl = new RepairLog();
// trunc(trunc point, txnId).
VoltMessage m1 = truncCompleteMsg(0L, 1L);
rl.deliver(m1);
assertEquals(2, rl.contents(1L, false).size());
VoltMessage m2 = truncCompleteMsg(0L, 2L);
rl.deliver(m2);
assertEquals(3, rl.contents(1L, false).size());
// trim m1. add m3
VoltMessage m3 = truncCompleteMsg(1L, 3L);
rl.deliver(m3);
assertEquals(3, rl.contents(1L, false).size());
assertEquals(m2, rl.contents(1L, false).get(1).getPayload());
assertEquals(2L, rl.contents(1L, false).get(1).getTxnId());
assertEquals(m3, rl.contents(1L, false).get(2).getPayload());
assertEquals(3L, rl.contents(1L, false).get(2).getTxnId());
}
@Test
public void testTruncationAfterPromotion()
{
RepairLog rl = new RepairLog();
VoltMessage m1 = truncInitMsg(0L, 1L);
rl.deliver(m1);
VoltMessage m2 = truncInitMsg(0L, 2L);
rl.deliver(m2);
assertEquals(3, rl.contents(1L, false).size());
rl.setLeaderState(true);
assertEquals(1, rl.contents(1L, false).size());
}
// validate the invariants on the RepairLog contents:
// Every entry in the log should have a unique, constantly increasing SP handle.
// There should be only one FragmentTaskMessage per MP TxnID
// There should be at most one FragmentTaskMessage uncovered by a CompleteTransactionMessage
// There should be no CompleteTransactionMessages indicating restart
private void validateRepairLog(List<Iv2RepairLogResponseMessage> stuff, long binaryLogUniqueId)
{
long prevHandle = Long.MIN_VALUE;
Long mpTxnId = null;
for (Iv2RepairLogResponseMessage imsg : stuff) {
if (imsg.getSequence() > 0) {
assertTrue(imsg.getHandle() > prevHandle);
prevHandle = imsg.getHandle();
if (imsg.getPayload() instanceof FragmentTaskMessage) {
assertEquals(null, mpTxnId);
mpTxnId = imsg.getTxnId();
} else if (imsg.getPayload() instanceof CompleteTransactionMessage) {
// can see bare CompleteTransactionMessage, but if we've got an MP
// in progress this should close it
assertFalse(((CompleteTransactionMessage)imsg.getPayload()).isRestart());
if (mpTxnId != null) {
assertEquals((long)mpTxnId, imsg.getTxnId());
}
mpTxnId = null;
}
} else {
assertTrue(imsg.hasHashinatorConfig());
assertEquals(binaryLogUniqueId, imsg.getBinaryLogUniqueId());
}
}
}
public static long setBinaryLogUniqueId(TransactionInfoBaseMessage msg, UniqueIdGenerator uig) {
Iv2InitiateTaskMessage taskMsg = null;
if (msg instanceof Iv2InitiateTaskMessage) {
taskMsg = (Iv2InitiateTaskMessage) msg;
} else if (msg instanceof FragmentTaskMessage) {
taskMsg = ((FragmentTaskMessage) msg).getInitiateTask();
}
if (taskMsg != null && taskMsg.getStoredProcedureName().startsWith("@ApplyBinaryLog")) {
ParameterSet params = taskMsg.getStoredProcedureInvocation().getParams();
long uid = uig.getNextUniqueId();
when(params.toArray()).thenReturn(new Object[] {null, 0l, 0l, uid, null});
return uid;
}
return Long.MIN_VALUE;
}
@Test
public void testFuzz()
{
TxnEgo sphandle = TxnEgo.makeZero(0);
UniqueIdGenerator uig = new UniqueIdGenerator(0, 0);
UniqueIdGenerator spbuig = new UniqueIdGenerator(0, 0);
UniqueIdGenerator mpbuig = new UniqueIdGenerator(0, 0);
sphandle = sphandle.makeNext();
RandomMsgGenerator msgGen = new RandomMsgGenerator();
RepairLog dut = new RepairLog();
long binaryLogSpUniqueId = Long.MIN_VALUE;
long binaryLogMpUniqueId = Long.MIN_VALUE;
for (int i = 0; i < 4000; i++) {
// get next message, update the sphandle according to SpScheduler rules,
// but only submit messages that would have been forwarded by the master
// to the repair log.
TransactionInfoBaseMessage msg = msgGen.generateRandomMessageInStream();
msg.setSpHandle(sphandle.getTxnId());
if (msg instanceof InitiateTaskMessage) {
binaryLogSpUniqueId = Math.max(binaryLogSpUniqueId, setBinaryLogUniqueId(msg, spbuig));
} else if (msg instanceof FragmentTaskMessage) {
binaryLogMpUniqueId = Math.max(binaryLogMpUniqueId, setBinaryLogUniqueId(msg, mpbuig));
}
sphandle = sphandle.makeNext();
if (!msg.isReadOnly() || msg instanceof CompleteTransactionMessage) {
dut.deliver(msg);
}
}
List<Iv2RepairLogResponseMessage> stuff = dut.contents(1l, false);
validateRepairLog(stuff, binaryLogSpUniqueId);
// Also check the MP version
stuff = dut.contents(1l, true);
validateRepairLog(stuff, binaryLogMpUniqueId);
}
@Test
public void testComparator()
{
RepairLog dut = new RepairLog();
Random rand = new Random();
List<RepairLog.Item> items = new ArrayList<RepairLog.Item>();
for (int i = 0; i < 1000000; i++) {
RepairLog.Item item = new RepairLog.Item(true, null, rand.nextInt(), i);
items.add(item);
}
Collections.sort(items, dut.m_handleComparator);
}
@Test
public void testTrackBinaryLogUniqueId() {
// The end unique id for an @ApplyBinaryLogSP invocation is recorded
// as its fifth parameter. Create a realistic invocation, deliver it
// to the repair log, and see what we get
final long endUniqueId = 42;
StoredProcedureInvocation spi = new StoredProcedureInvocation();
spi.setProcName("@ApplyBinaryLogSP");
spi.setParams(0, endUniqueId - 10, endUniqueId, endUniqueId, new byte[]{0});
spi.setOriginalUniqueId(endUniqueId - 10);
spi.setOriginalTxnId(endUniqueId -15);
Iv2InitiateTaskMessage msg =
new Iv2InitiateTaskMessage(0l, 0l, 0l, Long.MIN_VALUE, 0l, false, true,
spi, 0l, 0l, false);
msg.setSpHandle(900l);
RepairLog log = new RepairLog();
log.deliver(msg);
validateRepairLog(log.contents(1l, false), endUniqueId);
}
}
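The truncation rule exercised by testOfferWithTruncation above can be restated as a short worked sketch inside TestRepairLog; the handles are the test's own illustrative values:

// Delivering a message whose truncation handle is T trims every logged
// entry with handle <= T.
RepairLog rl = new RepairLog();
rl.deliver(truncInitMsg(0L, 1L)); // log holds handle 1
rl.deliver(truncInitMsg(0L, 2L)); // log holds handles 1 and 2
rl.deliver(truncInitMsg(1L, 3L)); // truncation point 1 trims handle 1
// contents(1L, false) now returns three responses: one header plus handles 2 and 3.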
| tests/frontend/org/voltdb/iv2/TestRepairLog.java | /* This file is part of VoltDB.
* Copyright (C) 2008-2015 VoltDB Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.voltdb.iv2;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertFalse;
import static junit.framework.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import org.junit.BeforeClass;
import org.junit.Test;
import org.voltcore.messaging.TransactionInfoBaseMessage;
import org.voltcore.messaging.VoltMessage;
import org.voltdb.ParameterSet;
import org.voltdb.StoredProcedureInvocation;
import org.voltdb.TheHashinator;
import org.voltdb.TheHashinator.HashinatorType;
import org.voltdb.messaging.CompleteTransactionMessage;
import org.voltdb.messaging.FragmentTaskMessage;
import org.voltdb.messaging.InitiateTaskMessage;
import org.voltdb.messaging.Iv2InitiateTaskMessage;
import org.voltdb.messaging.Iv2RepairLogResponseMessage;
public class TestRepairLog
{
VoltMessage truncInitMsg(long truncPt, long handle)
{
Iv2InitiateTaskMessage msg = mock(Iv2InitiateTaskMessage.class);
when(msg.getTruncationHandle()).thenReturn(truncPt);
when(msg.getSpHandle()).thenReturn(handle);
return msg;
}
VoltMessage nonTruncInitMsg()
{
return truncInitMsg(Long.MIN_VALUE, 0);
}
VoltMessage truncFragMsg(long truncPt, long mpTxnId)
{
FragmentTaskMessage msg = mock(FragmentTaskMessage.class);
when(msg.getTxnId()).thenReturn(mpTxnId);
when(msg.getTruncationHandle()).thenReturn(truncPt);
return msg;
}
VoltMessage truncCompleteMsg(long truncPt, long mpTxnId)
{
CompleteTransactionMessage msg = mock(CompleteTransactionMessage.class);
when(msg.getTxnId()).thenReturn(mpTxnId);
when(msg.getTruncationHandle()).thenReturn(truncPt);
return msg;
}
// a message that should never be logged.
private static class FooMessage extends VoltMessage
{
@Override
protected void initFromBuffer(ByteBuffer buf) throws IOException {
}
@Override
public void flattenToBuffer(ByteBuffer buf) throws IOException {
}
}
@BeforeClass
static public void initializeHashinator() {
TheHashinator.setConfiguredHashinatorType(HashinatorType.ELASTIC);
TheHashinator.initialize(TheHashinator.getConfiguredHashinatorClass(), TheHashinator.getConfigureBytes(8));
}
@Test
public void testOffer()
{
// offer various messages to the log and check
// that it keeps the expected ones.
RepairLog rl = new RepairLog();
VoltMessage m1 = nonTruncInitMsg();
VoltMessage m2 = nonTruncInitMsg();
rl.deliver(m1);
rl.deliver(m2);
List<Iv2RepairLogResponseMessage> contents = rl.contents(1l, false);
assertEquals(3, contents.size());
assertEquals(m1, contents.get(1).getPayload());
assertEquals(m2, contents.get(2).getPayload());
}
@Test
public void testOfferWithTruncation()
{
RepairLog rl = new RepairLog();
// add m1
VoltMessage m1 = truncInitMsg(0L, 1L);
rl.deliver(m1);
assertEquals(2, rl.contents(1L, false).size());
// add m2
VoltMessage m2 = truncInitMsg(0L, 2L);
rl.deliver(m2);
assertEquals(3, rl.contents(1L, false).size());
// trim m1. add m3
VoltMessage m3 = truncInitMsg(1L, 3L);
rl.deliver(m3);
assertEquals(3, rl.contents(1L, false).size());
assertEquals(m2, rl.contents(1L, false).get(1).getPayload());
assertEquals(2L, rl.contents(1L, false).get(1).getHandle());
assertEquals(m3, rl.contents(1L, false).get(2).getPayload());
assertEquals(3L, rl.contents(1L, false).get(2).getHandle());
}
@Test
public void testOfferUnneededMessage()
{
RepairLog rl = new RepairLog();
VoltMessage m1 = truncInitMsg(0L, 1L);
rl.deliver(m1);
// deliver a non-logged message (this is the test).
rl.deliver(new FooMessage());
VoltMessage m2 = truncInitMsg(0L, 2L);
rl.deliver(m2);
assertEquals(3, rl.contents(1L, false).size());
assertEquals(m1, rl.contents(1L, false).get(1).getPayload());
assertEquals(m2, rl.contents(1L, false).get(2).getPayload());
}
@Test
public void testOfferFragmentTaskMessage()
{
RepairLog rl = new RepairLog();
// trunc(trunc point, txnId).
VoltMessage m1 = truncFragMsg(0L, 1L);
rl.deliver(m1);
assertEquals(2, rl.contents(1L, false).size());
VoltMessage m2 = truncFragMsg(0L, 2L);
rl.deliver(m2);
assertEquals(3, rl.contents(1L, false).size());
// only the first message for a transaction is logged.
VoltMessage m2b = truncFragMsg(0L, 2L);
rl.deliver(m2b);
assertEquals(3, rl.contents(1L, false).size());
// trim m1. add m3
VoltMessage m3 = truncFragMsg(1L, 3L);
rl.deliver(m3);
assertEquals(3, rl.contents(1L, false).size());
assertEquals(m2, rl.contents(1L, false).get(1).getPayload());
assertEquals(2L, rl.contents(1L, false).get(1).getTxnId());
assertEquals(m3, rl.contents(1L, false).get(2).getPayload());
assertEquals(3L, rl.contents(1L, false).get(2).getTxnId());
}
@Test
public void testOfferCompleteMessage()
{
RepairLog rl = new RepairLog();
// trunc(trunc point, txnId).
VoltMessage m1 = truncCompleteMsg(0L, 1L);
rl.deliver(m1);
assertEquals(2, rl.contents(1L, false).size());
VoltMessage m2 = truncCompleteMsg(0L, 2L);
rl.deliver(m2);
assertEquals(3, rl.contents(1L, false).size());
// trim m1. add m3
VoltMessage m3 = truncCompleteMsg(1L, 3L);
rl.deliver(m3);
assertEquals(3, rl.contents(1L, false).size());
assertEquals(m2, rl.contents(1L, false).get(1).getPayload());
assertEquals(2L, rl.contents(1L, false).get(1).getTxnId());
assertEquals(m3, rl.contents(1L, false).get(2).getPayload());
assertEquals(3L, rl.contents(1L, false).get(2).getTxnId());
}
@Test
public void testTruncationAfterPromotion()
{
RepairLog rl = new RepairLog();
VoltMessage m1 = truncInitMsg(0L, 1L);
rl.deliver(m1);
VoltMessage m2 = truncInitMsg(0L, 2L);
rl.deliver(m2);
assertEquals(3, rl.contents(1L, false).size());
rl.setLeaderState(true);
assertEquals(1, rl.contents(1L, false).size());
}
// validate the invariants on the RepairLog contents:
// Every entry in the log should have a unique, constantly increasing SP handle.
// There should be only one FragmentTaskMessage per MP TxnID
// There should be at most one FragmentTaskMessage uncovered by a CompleteTransactionMessage
// There should be no CompleteTransactionMessages indicating restart
private void validateRepairLog(List<Iv2RepairLogResponseMessage> stuff, long binaryLogUniqueId)
{
long prevHandle = Long.MIN_VALUE;
Long mpTxnId = null;
for (Iv2RepairLogResponseMessage imsg : stuff) {
if (imsg.getSequence() > 0) {
assertTrue(imsg.getHandle() > prevHandle);
prevHandle = imsg.getHandle();
if (imsg.getPayload() instanceof FragmentTaskMessage) {
assertEquals(null, mpTxnId);
mpTxnId = imsg.getTxnId();
} else if (imsg.getPayload() instanceof CompleteTransactionMessage) {
// can see bare CompleteTransactionMessage, but if we've got an MP
// in progress this should close it
assertFalse(((CompleteTransactionMessage)imsg.getPayload()).isRestart());
if (mpTxnId != null) {
assertEquals((long)mpTxnId, imsg.getTxnId());
}
mpTxnId = null;
}
} else {
assertTrue(imsg.hasHashinatorConfig());
assertEquals(binaryLogUniqueId, imsg.getBinaryLogUniqueId());
}
}
}
public static long setBinaryLogUniqueId(TransactionInfoBaseMessage msg, UniqueIdGenerator uig) {
Iv2InitiateTaskMessage taskMsg = null;
if (msg instanceof Iv2InitiateTaskMessage) {
taskMsg = (Iv2InitiateTaskMessage) msg;
} else if (msg instanceof FragmentTaskMessage) {
taskMsg = ((FragmentTaskMessage) msg).getInitiateTask();
}
if (taskMsg != null && taskMsg.getStoredProcedureName().startsWith("@ApplyBinaryLog")) {
ParameterSet params = taskMsg.getStoredProcedureInvocation().getParams();
long uid = uig.getNextUniqueId();
            when(params.toArray()).thenReturn(new Object[] {null, 0L, 0L, uid, null});
return uid;
}
return Long.MIN_VALUE;
}
@Test
public void testFuzz()
{
TxnEgo sphandle = TxnEgo.makeZero(0);
UniqueIdGenerator uig = new UniqueIdGenerator(0, 0);
UniqueIdGenerator spbuig = new UniqueIdGenerator(0, 0);
UniqueIdGenerator mpbuig = new UniqueIdGenerator(0, 0);
sphandle = sphandle.makeNext();
RandomMsgGenerator msgGen = new RandomMsgGenerator();
RepairLog dut = new RepairLog();
long binaryLogSpUniqueId = Long.MIN_VALUE;
long binaryLogMpUniqueId = Long.MIN_VALUE;
for (int i = 0; i < 4000; i++) {
// get next message, update the sphandle according to SpScheduler rules,
// but only submit messages that would have been forwarded by the master
// to the repair log.
TransactionInfoBaseMessage msg = msgGen.generateRandomMessageInStream();
msg.setSpHandle(sphandle.getTxnId());
if (msg instanceof InitiateTaskMessage) {
binaryLogSpUniqueId = Math.max(binaryLogSpUniqueId, setBinaryLogUniqueId(msg, spbuig));
} else if (msg instanceof FragmentTaskMessage) {
binaryLogMpUniqueId = Math.max(binaryLogMpUniqueId, setBinaryLogUniqueId(msg, mpbuig));
}
sphandle = sphandle.makeNext();
if (!msg.isReadOnly() || msg instanceof CompleteTransactionMessage) {
dut.deliver(msg);
}
}
        List<Iv2RepairLogResponseMessage> stuff = dut.contents(1L, false);
validateRepairLog(stuff, binaryLogSpUniqueId);
// Also check the MP version
        stuff = dut.contents(1L, true);
validateRepairLog(stuff, binaryLogMpUniqueId);
}
@Test
public void testPerformance()
{
RepairLog dut = new RepairLog();
// First, add and truncate SP transactions with no MPs
dut.deliver(truncInitMsg(Long.MIN_VALUE, 0));
long start = System.currentTimeMillis();
for (int i = 0; i < 100000; i++)
{
VoltMessage msg = truncInitMsg(i, i + 1);
dut.deliver(msg);
}
long end = System.currentTimeMillis();
long duration1 = end - start;
System.out.println("Time to deliver 100,000 SPs: " + duration1);
// Now, add 40000 MP messages and then see how long it takes to do the SPs
dut = new RepairLog();
dut.deliver(truncInitMsg(Long.MIN_VALUE, 0));
for (int i = 0; i < 40000; i++) {
dut.deliver(truncCompleteMsg(Long.MIN_VALUE, i));
}
start = System.currentTimeMillis();
for (int i = 0; i < 100000; i++)
{
VoltMessage msg = truncInitMsg(i, i + 1);
dut.deliver(msg);
}
end = System.currentTimeMillis();
long duration2 = end - start;
System.out.println("Time to deliver 100,000 SPs: " + duration2);
// rough check, verify that the two don't differ by more than 20%
if (duration2 > duration1) {
long delta = Math.abs(duration2 - duration1);
float deltaPercent = delta / (float)duration1;
assertTrue("SP deliver performance with stored MP logs exceeds allowed hit of 20%, was: " +
(deltaPercent * 100) + "%.",
deltaPercent < .20);
}
}
@Test
public void testComparator()
{
RepairLog dut = new RepairLog();
Random rand = new Random();
List<RepairLog.Item> items = new ArrayList<RepairLog.Item>();
for (int i = 0; i < 1000000; i++) {
RepairLog.Item item = new RepairLog.Item(true, null, rand.nextInt(), i);
items.add(item);
}
Collections.sort(items, dut.m_handleComparator);
}
@Test
public void testTrackBinaryLogUniqueId() {
// The end unique id for an @ApplyBinaryLogSP invocation is recorded
// as its fifth parameter. Create a realistic invocation, deliver it
// to the repair log, and see what we get
final long endUniqueId = 42;
StoredProcedureInvocation spi = new StoredProcedureInvocation();
spi.setProcName("@ApplyBinaryLogSP");
spi.setParams(0, endUniqueId - 10, endUniqueId, endUniqueId, new byte[]{0});
spi.setOriginalUniqueId(endUniqueId - 10);
        spi.setOriginalTxnId(endUniqueId - 15);
        Iv2InitiateTaskMessage msg =
                new Iv2InitiateTaskMessage(0L, 0L, 0L, Long.MIN_VALUE, 0L, false, true,
                        spi, 0L, 0L, false);
        msg.setSpHandle(900L);
RepairLog log = new RepairLog();
log.deliver(msg);
        validateRepairLog(log.contents(1L, false), endUniqueId);
}
}
| ENG-9035: Remove RepairLog TestPerformance.
This test is very flaky. It really belongs in a system test instead of a
unit test. I can't think of a better way to test it reliably in JUnit.
Removing it for now, leaving the ticket open for a proper system test.
Change-Id: I34137052c6a02ad17f629a9447b140e0cfc1716c
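
The sketch below is purely illustrative and is not part of this commit or of
the VoltDB sources; the class and method names are hypothetical. It shows one
common way to make a timing comparison like the removed one less noisy, if it
were ever reintroduced: warm the code path up first, then compare medians of
repeated trials instead of a single System.currentTimeMillis() delta with a
20% tolerance. This reduces, but does not eliminate, the flakiness the
message describes, which is why a system test remains the better home for it.

import java.util.Arrays;

public class SteadierTiming {
    interface Workload { void run(); }

    // Runs the workload `warmup` unmeasured times to let the JIT settle,
    // then measures `trials` runs and returns the median elapsed nanoseconds.
    // A median is far less sensitive to a single GC pause or scheduler stall
    // than a one-shot wall-clock delta.
    static long medianNanos(Workload w, int warmup, int trials) {
        for (int i = 0; i < warmup; i++) {
            w.run();
        }
        long[] samples = new long[trials];
        for (int i = 0; i < trials; i++) {
            long start = System.nanoTime();
            w.run();
            samples[i] = System.nanoTime() - start;
        }
        Arrays.sort(samples);
        return samples[trials / 2];
    }
}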
| tests/frontend/org/voltdb/iv2/TestRepairLog.java | ENG-9035: Remove RepairLog TestPerformance. | <ide><path>ests/frontend/org/voltdb/iv2/TestRepairLog.java
<ide> }
<ide>
<ide> @Test
<del> public void testPerformance()
<del> {
<del> RepairLog dut = new RepairLog();
<del> // First, add and truncate SP transactions with no MPs
<del> dut.deliver(truncInitMsg(Long.MIN_VALUE, 0));
<del> long start = System.currentTimeMillis();
<del> for (int i = 0; i < 100000; i++)
<del> {
<del> VoltMessage msg = truncInitMsg(i, i + 1);
<del> dut.deliver(msg);
<del> }
<del> long end = System.currentTimeMillis();
<del> long duration1 = end - start;
<del> System.out.println("Time to deliver 100,000 SPs: " + duration1);
<del>
<del> // Now, add 40000 MP messages and then see how long it takes to do the SPs
<del> dut = new RepairLog();
<del> dut.deliver(truncInitMsg(Long.MIN_VALUE, 0));
<del> for (int i = 0; i < 40000; i++) {
<del> dut.deliver(truncCompleteMsg(Long.MIN_VALUE, i));
<del> }
<del> start = System.currentTimeMillis();
<del> for (int i = 0; i < 100000; i++)
<del> {
<del> VoltMessage msg = truncInitMsg(i, i + 1);
<del> dut.deliver(msg);
<del> }
<del> end = System.currentTimeMillis();
<del> long duration2 = end - start;
<del> System.out.println("Time to deliver 100,000 SPs: " + duration2);
<del> // rough check, verify that the two don't differ by more than 20%
<del> if (duration2 > duration1) {
<del> long delta = Math.abs(duration2 - duration1);
<del> float deltaPercent = delta / (float)duration1;
<del> assertTrue("SP deliver performance with stored MP logs exceeds allowed hit of 20%, was: " +
<del> (deltaPercent * 100) + "%.",
<del> deltaPercent < .20);
<del> }
<del> }
<del>
<del> @Test
<ide> public void testComparator()
<ide> {
<ide> RepairLog dut = new RepairLog(); |
|
Java | mpl-2.0 | 47e88567dff2cced90e78f5c0e61c9aaf1f1b9fe | 0 | vertretungsplanme/substitution-schedule-parser,johan12345/substitution-schedule-parser,vertretungsplanme/substitution-schedule-parser,johan12345/substitution-schedule-parser | /*
* substitution-schedule-parser - Java library for parsing schools' substitution schedules
* Copyright (c) 2016 Johan v. Forstner
* Copyright (c) 2016 Nico Alt
*
* This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
* If a copy of the MPL was not distributed with this file, You can obtain one at https://mozilla.org/MPL/2.0/.
*/
package me.vertretungsplan.parser;
import me.vertretungsplan.exception.CredentialInvalidException;
import me.vertretungsplan.objects.Substitution;
import me.vertretungsplan.objects.SubstitutionSchedule;
import me.vertretungsplan.objects.SubstitutionScheduleData;
import me.vertretungsplan.objects.SubstitutionScheduleDay;
import me.vertretungsplan.objects.credential.Credential;
import me.vertretungsplan.objects.credential.UserPasswordCredential;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.http.client.HttpResponseException;
import org.joda.time.LocalDate;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.IOException;
import java.util.*;
/**
 * Parser for LegionBoard, an open-source change-management system for schools.
*
* More information on the <a href="https://legionboard.github.io">official website</a>
* and on its <a href="https://gitlab.com/groups/legionboard">project page on GitLab</a>.
*/
public class LegionBoardParser extends BaseParser {
private JSONObject data;
/**
* URL of given LegionBoard Heart instance
*/
private String api;
/**
* URL of given LegionBoard Eye instance
*/
private String website;
public LegionBoardParser(SubstitutionScheduleData scheduleData, CookieProvider cookieProvider) {
super(scheduleData, cookieProvider);
data = scheduleData.getData();
try {
api = data.getString("api");
website = data.getString("website");
} catch (JSONException e) {
e.printStackTrace();
}
}
public SubstitutionSchedule getSubstitutionSchedule() throws IOException, JSONException, CredentialInvalidException {
final SubstitutionSchedule substitutionSchedule = SubstitutionSchedule.fromData(scheduleData);
substitutionSchedule.setClasses(getAllClasses());
substitutionSchedule.setTeachers(getAllTeachers());
substitutionSchedule.setWebsite(website);
final JSONArray changes = getChanges();
final JSONArray courses = getCourses();
final JSONArray teachers = getTeachers();
parseLegionBoard(substitutionSchedule, changes, courses, teachers);
return substitutionSchedule;
}
/**
* Returns authentication key as shown
* <a href="https://gitlab.com/legionboard/heart/blob/master/doc/README.md">in the documentation</a>.
*/
private String getAuthenticationKey(Credential credential) {
final UserPasswordCredential userPasswordCredential = (UserPasswordCredential) credential;
final String username = userPasswordCredential.getUsername();
final String password = userPasswordCredential.getPassword();
return DigestUtils.sha256Hex(username.toLowerCase() + "//" + password);
}
/**
 * Returns a JSONArray with all changes from now until one week from now.
* More information: <a href="https://gitlab.com/legionboard/heart/blob/master/doc/changes/list.md">List changes</a>
*/
private JSONArray getChanges() throws IOException, JSONException {
// Date (or alias of date) when the changes start
final String startBy = "now";
// Date (or alias of date) when the changes end
final String endBy = "i1w";
final String url = api + "/changes?startBy=" + startBy + "&endBy=" + endBy + "&k=" + getAuthenticationKey(getCredential());
return getJSONArray(url);
}
/**
* Returns a JSONArray with all courses.
* More information: <a href="https://gitlab.com/legionboard/heart/blob/master/doc/courses/list.md">List courses</a>
*/
private JSONArray getCourses() throws IOException, JSONException {
final String url = api + "/courses?k=" + getAuthenticationKey(getCredential());
return getJSONArray(url);
}
/**
* Returns a JSONArray with all teachers.
* More information: <a href="https://gitlab.com/legionboard/heart/blob/master/doc/teachers/list.md">List teachers</a>
*/
private JSONArray getTeachers() throws IOException, JSONException {
final String url = api + "/teachers?k=" + getAuthenticationKey(getCredential());
return getJSONArray(url);
}
private JSONArray getJSONArray(String url) throws IOException, JSONException {
try {
return new JSONArray(httpGet(url, "UTF-8"));
} catch (HttpResponseException httpResponseException) {
if (httpResponseException.getStatusCode() == 404) {
return null;
}
throw httpResponseException;
}
}
private void parseLegionBoard(SubstitutionSchedule substitutionSchedule, JSONArray changes, JSONArray courses, JSONArray teachers) throws IOException, JSONException {
if (changes == null) {
return;
}
// Link course IDs to their names
HashMap<String, String> coursesHashMap = null;
if (courses != null) {
coursesHashMap = new HashMap<>();
for (int i = 0; i < courses.length(); i++) {
JSONObject course = courses.getJSONObject(i);
coursesHashMap.put(course.getString("id"), course.getString("name"));
}
}
// Link teacher IDs to their names
HashMap<String, String> teachersHashMap = null;
if (teachers != null) {
teachersHashMap = new HashMap<>();
for (int i = 0; i < teachers.length(); i++) {
JSONObject teacher = teachers.getJSONObject(i);
teachersHashMap.put(teacher.getString("id"), teacher.getString("name"));
}
}
// Add changes to SubstitutionSchedule
LocalDate currentDate = null;
SubstitutionScheduleDay substitutionScheduleDay = new SubstitutionScheduleDay();
for (int i = 0; i < changes.length(); i++) {
if (currentDate == null) {
// Set date for the first time
currentDate = LocalDate.now();
substitutionScheduleDay = new SubstitutionScheduleDay();
substitutionScheduleDay.setDate(currentDate);
}
final JSONObject change = changes.getJSONObject(i);
final Substitution substitution = getSubstitution(change, coursesHashMap, teachersHashMap);
final LocalDate startingDate = new LocalDate(change.getString("startingDate"));
final LocalDate endingDate = new LocalDate(change.getString("endingDate"));
// Handle multi-day changes
if (!startingDate.isEqual(endingDate)) {
// If SubstitutionScheduleDay is not empty
if (substitutionScheduleDay.getSubstitutions() != null) {
substitutionSchedule.addDay(substitutionScheduleDay);
}
for (int k = 0; k < 7; k++) {
final LocalDate date = LocalDate.now().plusDays(k);
if ((date.isAfter(startingDate) || date.isEqual(startingDate)) &&
(date.isBefore(endingDate) || date.isEqual(endingDate))) {
substitutionScheduleDay = new SubstitutionScheduleDay();
substitutionScheduleDay.setDate(date);
substitutionScheduleDay.addSubstitution(substitution);
substitutionSchedule.addDay(substitutionScheduleDay);
}
}
continue;
}
// If starting date of change does not equal date of SubstitutionScheduleDay
if (!startingDate.isEqual(currentDate)) {
// If SubstitutionScheduleDay is not empty
if (substitutionScheduleDay.getSubstitutions() != null) {
substitutionSchedule.addDay(substitutionScheduleDay);
}
substitutionScheduleDay = new SubstitutionScheduleDay();
substitutionScheduleDay.setDate(startingDate);
}
substitutionScheduleDay.addSubstitution(substitution);
}
substitutionSchedule.addDay(substitutionScheduleDay);
}
private Substitution getSubstitution(JSONObject change, HashMap<String, String> coursesHashMap, HashMap<String, String> teachersHashMap) throws IOException, JSONException {
final Substitution substitution = new Substitution();
// Set class
final String classId = change.getString("course");
if (!classId.equals("0") && !change.isNull("course")) {
if (coursesHashMap == null) {
throw new IOException("Change references a course but courses are empty.");
}
final String singleClass = coursesHashMap.get(classId);
final HashSet<String> classes = new HashSet<>();
classes.add(singleClass);
substitution.setClasses(classes);
}
// Set type
String type = "Unknown";
switch (change.getString("type")) {
case "0":
type = "Entfall";
break;
case "1":
type = "Vertretung";
break;
case "2":
type = "Information";
break;
}
substitution.setType(type);
// Set color
substitution.setColor(colorProvider.getColor(type));
// Set covering teacher
final String coveringTeacherId = change.getString("coveringTeacher");
if (!coveringTeacherId.equals("0")) {
if (teachersHashMap == null) {
throw new IOException("Change references a covering teacher but teachers are empty.");
}
substitution.setTeacher(teachersHashMap.get(coveringTeacherId));
}
// Set teacher
final String teacherId = change.getString("teacher");
if (!teacherId.equals("0")) {
if (teachersHashMap == null) {
throw new IOException("Change references a teacher but teachers are empty.");
}
if (type.equals("Vertretung") || !coveringTeacherId.equals("0")) {
substitution.setPreviousTeacher(teachersHashMap.get(teacherId));
} else {
substitution.setTeacher(teachersHashMap.get(teacherId));
}
}
// Set description
substitution.setDesc(change.getString("text"));
// Set lesson
final String startingHour = change.getString("startingHour");
final String endingHour = change.getString("endingHour");
if (!startingHour.equals("") || !endingHour.equals("")) {
String lesson = "";
if (!startingHour.equals("") && endingHour.equals("")) {
lesson = "Ab " + startingHour;
}
if (startingHour.equals("") && !endingHour.equals("")) {
lesson = "Bis " + endingHour;
}
if (!startingHour.equals("") && !endingHour.equals("")) {
lesson = startingHour + " - " + endingHour;
}
substitution.setLesson(lesson);
}
return substitution;
}
@Override
public List<String> getAllClasses() throws IOException, JSONException {
final List<String> classes = new ArrayList<>();
final JSONArray courses = getCourses();
if (courses == null) {
return null;
}
for (int i = 0; i < courses.length(); i++) {
final JSONObject course = courses.getJSONObject(i);
classes.add(course.getString("name"));
}
Collections.sort(classes);
return classes;
}
@Override
public List<String> getAllTeachers() throws IOException, JSONException {
final List<String> teachers = new ArrayList<>();
final JSONArray jsonTeachers = getTeachers();
if (jsonTeachers == null) {
return null;
}
for (int i = 0; i < jsonTeachers.length(); i++) {
final JSONObject teacher = jsonTeachers.getJSONObject(i);
teachers.add(teacher.getString("name"));
}
Collections.sort(teachers);
return teachers;
}
}
| parser/src/main/java/me/vertretungsplan/parser/LegionBoardParser.java | /*
* substitution-schedule-parser - Java library for parsing schools' substitution schedules
* Copyright (c) 2016 Johan v. Forstner
* Copyright (c) 2016 Nico Alt
*
* This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
* If a copy of the MPL was not distributed with this file, You can obtain one at https://mozilla.org/MPL/2.0/.
*/
package me.vertretungsplan.parser;
import me.vertretungsplan.exception.CredentialInvalidException;
import me.vertretungsplan.objects.Substitution;
import me.vertretungsplan.objects.SubstitutionSchedule;
import me.vertretungsplan.objects.SubstitutionScheduleData;
import me.vertretungsplan.objects.SubstitutionScheduleDay;
import me.vertretungsplan.objects.credential.Credential;
import me.vertretungsplan.objects.credential.UserPasswordCredential;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.http.client.HttpResponseException;
import org.joda.time.LocalDate;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.IOException;
import java.util.*;
/**
 * Parser for LegionBoard, an open-source change-management system for schools.
*
* More information on the <a href="https://legionboard.github.io">official website</a>
* and on its <a href="https://gitlab.com/groups/legionboard">project page on GitLab</a>.
*/
public class LegionBoardParser extends BaseParser {
private JSONObject data;
/**
* URL of given LegionBoard Heart instance
*/
private String api;
/**
* URL of given LegionBoard Eye instance
*/
private String website;
public LegionBoardParser(SubstitutionScheduleData scheduleData, CookieProvider cookieProvider) {
super(scheduleData, cookieProvider);
data = scheduleData.getData();
try {
api = data.getString("api");
website = data.getString("website");
} catch (JSONException e) {
e.printStackTrace();
}
}
public SubstitutionSchedule getSubstitutionSchedule() throws IOException, JSONException, CredentialInvalidException {
final SubstitutionSchedule substitutionSchedule = SubstitutionSchedule.fromData(scheduleData);
substitutionSchedule.setClasses(getAllClasses());
substitutionSchedule.setTeachers(getAllTeachers());
substitutionSchedule.setWebsite(website);
final JSONArray changes = getChanges();
final JSONArray courses = getCourses();
final JSONArray teachers = getTeachers();
parseLegionBoard(substitutionSchedule, changes, courses, teachers);
return substitutionSchedule;
}
/**
* Returns authentication key as shown
* <a href="https://gitlab.com/legionboard/heart/blob/master/doc/README.md">in the documentation</a>.
*/
private String getAuthenticationKey(Credential credential) {
final UserPasswordCredential userPasswordCredential = (UserPasswordCredential) credential;
final String username = userPasswordCredential.getUsername();
final String password = userPasswordCredential.getPassword();
return DigestUtils.sha256Hex(username.toLowerCase() + "//" + password);
}
/**
 * Returns a JSONArray with all changes from now until one week from now.
* More information: <a href="https://gitlab.com/legionboard/heart/blob/master/doc/changes/list.md">List changes</a>
*/
private JSONArray getChanges() throws IOException, JSONException {
// Date (or alias of date) when the changes start
final String startBy = "now";
// Date (or alias of date) when the changes end
final String endBy = "i1w";
final String url = api + "/changes?startBy=" + startBy + "&endBy=" + endBy + "&k=" + getAuthenticationKey(getCredential());
return getJSONArray(url);
}
/**
* Returns a JSONArray with all courses.
* More information: <a href="https://gitlab.com/legionboard/heart/blob/master/doc/courses/list.md">List courses</a>
*/
private JSONArray getCourses() throws IOException, JSONException {
final String url = api + "/courses?k=" + getAuthenticationKey(getCredential());
return getJSONArray(url);
}
/**
* Returns a JSONArray with all teachers.
* More information: <a href="https://gitlab.com/legionboard/heart/blob/master/doc/teachers/list.md">List teachers</a>
*/
private JSONArray getTeachers() throws IOException, JSONException {
final String url = api + "/teachers?k=" + getAuthenticationKey(getCredential());
return getJSONArray(url);
}
private JSONArray getJSONArray(String url) throws IOException, JSONException {
try {
return new JSONArray(httpGet(url, "UTF-8"));
} catch (HttpResponseException httpResponseException) {
if (httpResponseException.getStatusCode() == 404) {
return null;
}
throw httpResponseException;
}
}
private void parseLegionBoard(SubstitutionSchedule substitutionSchedule, JSONArray changes, JSONArray courses, JSONArray teachers) throws IOException, JSONException {
if (changes == null) {
return;
}
// Link course IDs to their names
HashMap<String, String> coursesHashMap = null;
if (courses != null) {
coursesHashMap = new HashMap<>();
for (int i = 0; i < courses.length(); i++) {
JSONObject course = courses.getJSONObject(i);
coursesHashMap.put(course.getString("id"), course.getString("name"));
}
}
// Link teacher IDs to their names
HashMap<String, String> teachersHashMap = null;
if (teachers != null) {
teachersHashMap = new HashMap<>();
for (int i = 0; i < teachers.length(); i++) {
JSONObject teacher = teachers.getJSONObject(i);
teachersHashMap.put(teacher.getString("id"), teacher.getString("name"));
}
}
// Add changes to SubstitutionSchedule
LocalDate currentDate = null;
SubstitutionScheduleDay substitutionScheduleDay = new SubstitutionScheduleDay();
for (int i = 0; i < changes.length(); i++) {
if (currentDate == null) {
// Set date for the first time
currentDate = LocalDate.now();
substitutionScheduleDay = new SubstitutionScheduleDay();
substitutionScheduleDay.setDate(currentDate);
}
final JSONObject change = changes.getJSONObject(i);
final Substitution substitution = getSubstitution(change, coursesHashMap, teachersHashMap);
final LocalDate startingDate = new LocalDate(change.getString("startingDate"));
final LocalDate endingDate = new LocalDate(change.getString("endingDate"));
// Handle multi-day changes
if (!startingDate.isEqual(endingDate)) {
// If SubstitutionScheduleDay is not empty
if (substitutionScheduleDay.getSubstitutions() != null) {
substitutionSchedule.addDay(substitutionScheduleDay);
}
for (int k = 0; k < 7; k++) {
final LocalDate date = LocalDate.now().plusDays(k);
if ((date.isAfter(startingDate) || date.isEqual(startingDate)) &&
(date.isBefore(endingDate) || date.isEqual(endingDate))) {
substitutionScheduleDay = new SubstitutionScheduleDay();
substitutionScheduleDay.setDate(date);
substitutionScheduleDay.addSubstitution(substitution);
substitutionSchedule.addDay(substitutionScheduleDay);
}
}
continue;
}
// If starting date of change does not equal date of SubstitutionScheduleDay
if (!startingDate.isEqual(currentDate)) {
// If SubstitutionScheduleDay is not empty
if (substitutionScheduleDay.getSubstitutions() != null) {
substitutionSchedule.addDay(substitutionScheduleDay);
}
substitutionScheduleDay = new SubstitutionScheduleDay();
substitutionScheduleDay.setDate(startingDate);
}
substitutionScheduleDay.addSubstitution(substitution);
}
substitutionSchedule.addDay(substitutionScheduleDay);
}
private Substitution getSubstitution(JSONObject change, HashMap<String, String> coursesHashMap, HashMap<String, String> teachersHashMap) throws IOException, JSONException {
final Substitution substitution = new Substitution();
// Set class
final String classId = change.getString("course");
if (!classId.equals("0") && classId != null) {
if (coursesHashMap == null) {
throw new IOException("Change references a course but courses are empty.");
}
final String singleClass = coursesHashMap.get(classId);
final HashSet<String> classes = new HashSet<>();
classes.add(singleClass);
substitution.setClasses(classes);
}
// Set type
String type = "Unknown";
switch (change.getString("type")) {
case "0":
type = "Entfall";
break;
case "1":
type = "Vertretung";
break;
case "2":
type = "Information";
break;
}
substitution.setType(type);
// Set color
substitution.setColor(colorProvider.getColor(type));
// Set covering teacher
final String coveringTeacherId = change.getString("coveringTeacher");
if (!coveringTeacherId.equals("0")) {
if (teachersHashMap == null) {
throw new IOException("Change references a covering teacher but teachers are empty.");
}
substitution.setTeacher(teachersHashMap.get(coveringTeacherId));
}
// Set teacher
final String teacherId = change.getString("teacher");
if (!teacherId.equals("0")) {
if (teachersHashMap == null) {
throw new IOException("Change references a teacher but teachers are empty.");
}
if (type.equals("Vertretung") || !coveringTeacherId.equals("0")) {
substitution.setPreviousTeacher(teachersHashMap.get(teacherId));
} else {
substitution.setTeacher(teachersHashMap.get(teacherId));
}
}
// Set description
substitution.setDesc(change.getString("text"));
// Set lesson
final String startingHour = change.getString("startingHour");
final String endingHour = change.getString("endingHour");
if (!startingHour.equals("") || !endingHour.equals("")) {
String lesson = "";
if (!startingHour.equals("") && endingHour.equals("")) {
lesson = "Ab " + startingHour;
}
if (startingHour.equals("") && !endingHour.equals("")) {
lesson = "Bis " + endingHour;
}
if (!startingHour.equals("") && !endingHour.equals("")) {
lesson = startingHour + " - " + endingHour;
}
substitution.setLesson(lesson);
}
return substitution;
}
@Override
public List<String> getAllClasses() throws IOException, JSONException {
final List<String> classes = new ArrayList<>();
final JSONArray courses = getCourses();
if (courses == null) {
return null;
}
for (int i = 0; i < courses.length(); i++) {
final JSONObject course = courses.getJSONObject(i);
classes.add(course.getString("name"));
}
Collections.sort(classes);
return classes;
}
@Override
public List<String> getAllTeachers() throws IOException, JSONException {
final List<String> teachers = new ArrayList<>();
final JSONArray jsonTeachers = getTeachers();
if (jsonTeachers == null) {
return null;
}
for (int i = 0; i < jsonTeachers.length(); i++) {
final JSONObject teacher = jsonTeachers.getJSONObject(i);
teachers.add(teacher.getString("name"));
}
Collections.sort(teachers);
return teachers;
}
}
| LegionBoard: fix bug with course being "null"
When upgrading LegionBoard Heart from 0.1.x to 0.2+, all existing changes get the course "null". Mistakenly, I thought the JSONObject would contain a "real" null, but the null is given as a string.
The following IOException is fixed with this commit:
java.io.IOException: Change references a course but courses are empty.
at me.vertretungsplan.parser.LegionBoardParser.getSubstitution(LegionBoardParser.java:201)
at me.vertretungsplan.parser.LegionBoardParser.parseLegionBoard(LegionBoardParser.java:160)
at me.vertretungsplan.parser.LegionBoardParser.getSubstitutionSchedule(LegionBoardParser.java:69)
| parser/src/main/java/me/vertretungsplan/parser/LegionBoardParser.java | LegionBoard: fix bug with course being "null" | <ide><path>arser/src/main/java/me/vertretungsplan/parser/LegionBoardParser.java
<ide> final Substitution substitution = new Substitution();
<ide> // Set class
<ide> final String classId = change.getString("course");
<del> if (!classId.equals("0") && classId != null) {
<add> if (!classId.equals("0") && !change.isNull("course")) {
<ide> if (coursesHashMap == null) {
<ide> throw new IOException("Change references a course but courses are empty.");
<ide> } |
|
JavaScript | mit | ad55adb5828ac40b8b4bd714a47c5f278d6f1e9d | 0 | fridge-cms/fridge.js,fridge-cms/fridge.js | import React from "react";
import PropTypes from "prop-types";
export default class Fridge extends React.Component {
static contextTypes = {
fridge: PropTypes.object.isRequired,
store: PropTypes.object.isRequired
};
static propTypes = {
children: PropTypes.func.isRequired
};
state = {
fridgeProps: {}
};
setId() {
const { store } = this.context;
this.id = store.getNextId();
}
componentWillMount() {
if (!this.id) this.setId();
const { store } = this.context;
const fridgeProps = store.get(this.id) || {};
this.setState({ fridgeProps });
}
componentWillUnmount() {
this.unmounted = true;
}
componentDidMount() {
if (this.shouldResolve()) this.getFridgeProps();
}
componentDidUpdate(prevProps, prevState) {
const { query } = this.props;
if (prevProps.query !== query) {
this.setId();
this.setState({ fridgeProps: null, resolved: false });
}
}
getFridgeProps = async () => {
const { fridge, store } = this.context;
const { query } = this.props;
if (!query) return true;
// getFridgeProps is being called from
// react-tree-walker, we need to set our id
if (!this.id) this.setId();
try {
this.resolving = true;
const queries = [].concat(query);
const fridgeProps = await Promise.all(queries.map(q => fridge.get(q)));
if (this.unmounted) return false;
store.register(this.id, fridgeProps);
this.resolving = false;
if (this.setState) {
this.setState({ fridgeProps, resolved: true });
}
} catch (err) {
this.resolving = false;
throw new Error("Failed to resolve connected Fridge component");
}
return true;
};
shouldResolve() {
return (
this.context.store.get(this.id) == null &&
typeof window !== "undefined" &&
!this.resolving
);
}
render() {
const { fridgeProps } = this.state;
const { children } = this.props;
if (this.shouldResolve()) this.getFridgeProps();
return this.resolving && !fridgeProps ? null : children(...fridgeProps);
}
}
| lib/Fridge.js | import React from "react";
import PropTypes from "prop-types";
export default class Fridge extends React.Component {
static contextTypes = {
fridge: PropTypes.object.isRequired,
store: PropTypes.object.isRequired
};
static propTypes = {
children: PropTypes.func.isRequired
};
state = {
fridgeProps: null
};
setId() {
const { store } = this.context;
this.id = store.getNextId();
}
componentWillMount() {
if (!this.id) this.setId();
const { store } = this.context;
const fridgeProps = store.get(this.id) || null;
this.setState({ fridgeProps });
}
componentWillUnmount() {
this.unmounted = true;
}
componentDidMount() {
if (this.shouldResolve()) this.getFridgeProps();
}
componentDidUpdate(prevProps, prevState) {
const { query } = this.props;
if (prevProps.query !== query) {
this.setId();
this.setState({ fridgeProps: null, resolved: false });
}
}
getFridgeProps = async () => {
const { fridge, store } = this.context;
const { query } = this.props;
if (!query) return true;
// getFridgeProps is being called from
// react-tree-walker, we need to set our id
if (!this.id) this.setId();
try {
this.resolving = true;
const queries = [].concat(query);
const fridgeProps = await Promise.all(queries.map(q => fridge.get(q)));
if (this.unmounted) return false;
store.register(this.id, fridgeProps);
this.resolving = false;
if (this.setState) {
this.setState({ fridgeProps, resolved: true });
}
} catch (err) {
this.resolving = false;
throw new Error("Failed to resolve connected Fridge component");
}
return true;
};
shouldResolve() {
return (
this.context.store.get(this.id) == null &&
typeof window !== "undefined" &&
!this.resolving
);
}
render() {
const { fridgeProps } = this.state;
const { children } = this.props;
if (this.shouldResolve()) this.getFridgeProps();
return this.resolving && !fridgeProps ? null : children(...fridgeProps);
}
}
| Update Fridge.js | lib/Fridge.js | Update Fridge.js | <ide><path>ib/Fridge.js
<ide> };
<ide>
<ide> state = {
<del> fridgeProps: null
<add> fridgeProps: {}
<ide> };
<ide>
<ide> setId() {
<ide> componentWillMount() {
<ide> if (!this.id) this.setId();
<ide> const { store } = this.context;
<del> const fridgeProps = store.get(this.id) || null;
<add> const fridgeProps = store.get(this.id) || {};
<ide>
<ide> this.setState({ fridgeProps });
<ide> } |
|
Java | apache-2.0 | 61df2a584122f8e4a034d1d0a60b243e93957b27 | 0 | jay-hodgson/SynapseWebClient,Sage-Bionetworks/SynapseWebClient,Sage-Bionetworks/SynapseWebClient,jay-hodgson/SynapseWebClient,Sage-Bionetworks/SynapseWebClient,jay-hodgson/SynapseWebClient,jay-hodgson/SynapseWebClient,Sage-Bionetworks/SynapseWebClient | package org.sagebionetworks.web.unitserver;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyList;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.sagebionetworks.repo.model.EntityBundle.ACCESS_REQUIREMENTS;
import static org.sagebionetworks.repo.model.EntityBundle.ANNOTATIONS;
import static org.sagebionetworks.repo.model.EntityBundle.ENTITY;
import static org.sagebionetworks.repo.model.EntityBundle.ENTITY_PATH;
import static org.sagebionetworks.repo.model.EntityBundle.HAS_CHILDREN;
import static org.sagebionetworks.repo.model.EntityBundle.PERMISSIONS;
import static org.sagebionetworks.repo.model.EntityBundle.ROOT_WIKI_ID;
import static org.sagebionetworks.repo.model.EntityBundle.UNMET_ACCESS_REQUIREMENTS;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Matchers;
import org.mockito.Mockito;
import org.sagebionetworks.client.SynapseClient;
import org.sagebionetworks.client.exceptions.SynapseException;
import org.sagebionetworks.client.exceptions.SynapseNotFoundException;
import org.sagebionetworks.evaluation.model.Evaluation;
import org.sagebionetworks.evaluation.model.EvaluationStatus;
import org.sagebionetworks.evaluation.model.Participant;
import org.sagebionetworks.evaluation.model.UserEvaluationPermissions;
import org.sagebionetworks.reflection.model.PaginatedResults;
import org.sagebionetworks.repo.model.ACCESS_TYPE;
import org.sagebionetworks.repo.model.AccessControlList;
import org.sagebionetworks.repo.model.AccessRequirement;
import org.sagebionetworks.repo.model.Annotations;
import org.sagebionetworks.repo.model.EntityBundle;
import org.sagebionetworks.repo.model.EntityHeader;
import org.sagebionetworks.repo.model.EntityIdList;
import org.sagebionetworks.repo.model.EntityPath;
import org.sagebionetworks.repo.model.ExampleEntity;
import org.sagebionetworks.repo.model.FileEntity;
import org.sagebionetworks.repo.model.Folder;
import org.sagebionetworks.repo.model.JoinTeamSignedToken;
import org.sagebionetworks.repo.model.LogEntry;
import org.sagebionetworks.repo.model.MembershipInvitation;
import org.sagebionetworks.repo.model.MembershipInvtnSubmission;
import org.sagebionetworks.repo.model.MembershipRequest;
import org.sagebionetworks.repo.model.MembershipRqstSubmission;
import org.sagebionetworks.repo.model.ObjectType;
import org.sagebionetworks.repo.model.Project;
import org.sagebionetworks.repo.model.ProjectHeader;
import org.sagebionetworks.repo.model.ProjectListSortColumn;
import org.sagebionetworks.repo.model.ProjectListType;
import org.sagebionetworks.repo.model.ResourceAccess;
import org.sagebionetworks.repo.model.RestrictableObjectDescriptor;
import org.sagebionetworks.repo.model.RestrictableObjectType;
import org.sagebionetworks.repo.model.SignedTokenInterface;
import org.sagebionetworks.repo.model.Team;
import org.sagebionetworks.repo.model.TeamMember;
import org.sagebionetworks.repo.model.TeamMembershipStatus;
import org.sagebionetworks.repo.model.TermsOfUseAccessRequirement;
import org.sagebionetworks.repo.model.UserGroup;
import org.sagebionetworks.repo.model.UserGroupHeader;
import org.sagebionetworks.repo.model.UserProfile;
import org.sagebionetworks.repo.model.UserSessionData;
import org.sagebionetworks.repo.model.auth.UserEntityPermissions;
import org.sagebionetworks.repo.model.doi.Doi;
import org.sagebionetworks.repo.model.doi.DoiStatus;
import org.sagebionetworks.repo.model.entity.query.SortDirection;
import org.sagebionetworks.repo.model.file.ChunkRequest;
import org.sagebionetworks.repo.model.file.ChunkedFileToken;
import org.sagebionetworks.repo.model.file.CompleteAllChunksRequest;
import org.sagebionetworks.repo.model.file.CompleteChunkedFileRequest;
import org.sagebionetworks.repo.model.file.CreateChunkedFileTokenRequest;
import org.sagebionetworks.repo.model.file.ExternalFileHandle;
import org.sagebionetworks.repo.model.file.FileHandleResults;
import org.sagebionetworks.repo.model.file.S3FileHandle;
import org.sagebionetworks.repo.model.file.State;
import org.sagebionetworks.repo.model.file.UploadDaemonStatus;
import org.sagebionetworks.repo.model.message.MessageToUser;
import org.sagebionetworks.repo.model.message.NotificationSettingsSignedToken;
import org.sagebionetworks.repo.model.message.Settings;
import org.sagebionetworks.repo.model.principal.AddEmailInfo;
import org.sagebionetworks.repo.model.project.ExternalS3StorageLocationSetting;
import org.sagebionetworks.repo.model.project.ExternalStorageLocationSetting;
import org.sagebionetworks.repo.model.project.ProjectSetting;
import org.sagebionetworks.repo.model.project.ProjectSettingsType;
import org.sagebionetworks.repo.model.project.StorageLocationSetting;
import org.sagebionetworks.repo.model.project.UploadDestinationListSetting;
import org.sagebionetworks.repo.model.provenance.Activity;
import org.sagebionetworks.repo.model.quiz.PassingRecord;
import org.sagebionetworks.repo.model.quiz.Quiz;
import org.sagebionetworks.repo.model.quiz.QuizResponse;
import org.sagebionetworks.repo.model.v2.wiki.V2WikiHeader;
import org.sagebionetworks.repo.model.v2.wiki.V2WikiHistorySnapshot;
import org.sagebionetworks.repo.model.v2.wiki.V2WikiOrderHint;
import org.sagebionetworks.repo.model.v2.wiki.V2WikiPage;
import org.sagebionetworks.repo.model.wiki.WikiHeader;
import org.sagebionetworks.repo.model.wiki.WikiPage;
import org.sagebionetworks.schema.adapter.AdapterFactory;
import org.sagebionetworks.schema.adapter.JSONObjectAdapter;
import org.sagebionetworks.schema.adapter.JSONObjectAdapterException;
import org.sagebionetworks.schema.adapter.org.json.AdapterFactoryImpl;
import org.sagebionetworks.schema.adapter.org.json.EntityFactory;
import org.sagebionetworks.schema.adapter.org.json.JSONObjectAdapterImpl;
import org.sagebionetworks.util.SerializationUtils;
import org.sagebionetworks.web.client.view.TeamRequestBundle;
import org.sagebionetworks.web.server.servlet.MarkdownCacheRequest;
import org.sagebionetworks.web.server.servlet.NotificationTokenType;
import org.sagebionetworks.web.server.servlet.ServiceUrlProvider;
import org.sagebionetworks.web.server.servlet.SynapseClientImpl;
import org.sagebionetworks.web.server.servlet.SynapseProvider;
import org.sagebionetworks.web.server.servlet.TokenProvider;
import org.sagebionetworks.web.shared.AccessRequirementUtils;
import org.sagebionetworks.web.shared.EntityBundlePlus;
import org.sagebionetworks.web.shared.OpenTeamInvitationBundle;
import org.sagebionetworks.web.shared.ProjectPagedResults;
import org.sagebionetworks.web.shared.TeamBundle;
import org.sagebionetworks.web.shared.TeamMemberBundle;
import org.sagebionetworks.web.shared.TeamMemberPagedResults;
import org.sagebionetworks.web.shared.WikiPageKey;
import org.sagebionetworks.web.shared.exceptions.BadRequestException;
import org.sagebionetworks.web.shared.exceptions.ConflictException;
import org.sagebionetworks.web.shared.exceptions.NotFoundException;
import org.sagebionetworks.web.shared.exceptions.RestServiceException;
import org.sagebionetworks.web.shared.users.AclUtils;
import org.sagebionetworks.web.shared.users.PermissionLevel;
import com.google.common.cache.Cache;
/**
* Test for the SynapseClientImpl
*
* @author John
*
*/
public class SynapseClientImplTest {
public static final String TEST_HOME_PAGE_BASE = "http://mysynapse.org/";
public static final String MY_USER_PROFILE_OWNER_ID = "MyOwnerID";
SynapseProvider mockSynapseProvider;
TokenProvider mockTokenProvider;
ServiceUrlProvider mockUrlProvider;
SynapseClient mockSynapse;
SynapseClientImpl synapseClient;
String entityId = "123";
String inviteeUserId = "900";
UserProfile inviteeUserProfile;
ExampleEntity entity;
Annotations annos;
UserEntityPermissions eup;
UserEvaluationPermissions userEvaluationPermissions;
List<EntityHeader> batchHeaderResults;
String testFileName = "testFileEntity.R";
EntityPath path;
org.sagebionetworks.reflection.model.PaginatedResults<UserGroup> pgugs;
org.sagebionetworks.reflection.model.PaginatedResults<UserProfile> pgups;
org.sagebionetworks.reflection.model.PaginatedResults<Team> pguts;
Team teamA, teamZ;
AccessControlList acl;
WikiPage page;
V2WikiPage v2Page;
S3FileHandle handle;
Evaluation mockEvaluation;
Participant mockParticipant;
UserSessionData mockUserSessionData;
UserProfile mockUserProfile;
MembershipInvtnSubmission testInvitation;
PaginatedResults mockPaginatedMembershipRequest;
Activity mockActivity;
MessageToUser sentMessage;
Long storageLocationId = 9090L;
UserProfile testUserProfile;
Long version = 1L;
//Token testing
NotificationSettingsSignedToken notificationSettingsToken;
JoinTeamSignedToken joinTeamToken;
String encodedJoinTeamToken, encodedNotificationSettingsToken;
private static final String testUserId = "myUserId";
private static final String EVAL_ID_1 = "eval ID 1";
private static final String EVAL_ID_2 = "eval ID 2";
private static JSONObjectAdapter jsonObjectAdapter = new JSONObjectAdapterImpl();
private static AdapterFactory adapterFactory = new AdapterFactoryImpl();
// private static JSONEntityFactory jsonEntityFactory = new JSONEntityFactoryImpl(
// adapterFactory);
private TeamMembershipStatus membershipStatus;
@Before
public void before() throws SynapseException, JSONObjectAdapterException {
mockSynapse = Mockito.mock(SynapseClient.class);
mockSynapseProvider = Mockito.mock(SynapseProvider.class);
mockUrlProvider = Mockito.mock(ServiceUrlProvider.class);
when(mockSynapseProvider.createNewClient()).thenReturn(mockSynapse);
mockTokenProvider = Mockito.mock(TokenProvider.class);
mockPaginatedMembershipRequest = Mockito.mock(PaginatedResults.class);
mockActivity = Mockito.mock(Activity.class);
when(mockPaginatedMembershipRequest.getTotalNumberOfResults()).thenReturn(3L);
synapseClient = new SynapseClientImpl();
synapseClient.setSynapseProvider(mockSynapseProvider);
synapseClient.setTokenProvider(mockTokenProvider);
synapseClient.setServiceUrlProvider(mockUrlProvider);
		// Set up the entity
entity = new ExampleEntity();
entity.setId(entityId);
entity.setEntityType(ExampleEntity.class.getName());
entity.setModifiedBy(testUserId);
// the mock synapse should return this object
when(mockSynapse.getEntityById(entityId)).thenReturn(entity);
// Setup the annotations
annos = new Annotations();
annos.setId(entityId);
annos.addAnnotation("string", "a string value");
// the mock synapse should return this object
when(mockSynapse.getAnnotations(entityId)).thenReturn(annos);
// Setup the Permissions
eup = new UserEntityPermissions();
eup.setCanDelete(true);
eup.setCanView(false);
eup.setOwnerPrincipalId(999L);
// the mock synapse should return this object
when(mockSynapse.getUsersEntityPermissions(entityId)).thenReturn(eup);
// user can change permissions on eval 2, but not on 1
userEvaluationPermissions = new UserEvaluationPermissions();
userEvaluationPermissions.setCanChangePermissions(false);
when(mockSynapse.getUserEvaluationPermissions(EVAL_ID_1)).thenReturn(
userEvaluationPermissions);
userEvaluationPermissions = new UserEvaluationPermissions();
userEvaluationPermissions.setCanChangePermissions(true);
when(mockSynapse.getUserEvaluationPermissions(EVAL_ID_2)).thenReturn(
userEvaluationPermissions);
// Setup the path
path = new EntityPath();
path.setPath(new ArrayList<EntityHeader>());
EntityHeader header = new EntityHeader();
header.setId(entityId);
header.setName("RomperRuuuu");
path.getPath().add(header);
// the mock synapse should return this object
when(mockSynapse.getEntityPath(entityId)).thenReturn(path);
pgugs = new org.sagebionetworks.reflection.model.PaginatedResults<UserGroup>();
List<UserGroup> ugs = new ArrayList<UserGroup>();
ugs.add(new UserGroup());
pgugs.setResults(ugs);
when(mockSynapse.getGroups(anyInt(), anyInt())).thenReturn(pgugs);
pgups = new org.sagebionetworks.reflection.model.PaginatedResults<UserProfile>();
List<UserProfile> ups = new ArrayList<UserProfile>();
ups.add(new UserProfile());
pgups.setResults(ups);
when(mockSynapse.getUsers(anyInt(), anyInt())).thenReturn(pgups);
pguts = new org.sagebionetworks.reflection.model.PaginatedResults<Team>();
List<Team> uts = new ArrayList<Team>();
teamZ = new Team();
teamZ.setId("1");
teamZ.setName("zygote");
uts.add(teamZ);
teamA = new Team();
teamA.setId("2");
teamA.setName("Amplitude");
uts.add(teamA);
pguts.setResults(uts);
when(mockSynapse.getTeamsForUser(anyString(), anyInt(), anyInt()))
.thenReturn(pguts);
acl = new AccessControlList();
acl.setId("sys999");
Set<ResourceAccess> ras = new HashSet<ResourceAccess>();
ResourceAccess ra = new ResourceAccess();
ra.setPrincipalId(101L);
ra.setAccessType(AclUtils
.getACCESS_TYPEs(PermissionLevel.CAN_ADMINISTER));
acl.setResourceAccess(ras);
when(mockSynapse.getACL(anyString())).thenReturn(acl);
when(mockSynapse.createACL((AccessControlList) any())).thenReturn(acl);
when(mockSynapse.updateACL((AccessControlList) any())).thenReturn(acl);
when(mockSynapse.updateACL((AccessControlList) any(), eq(true)))
.thenReturn(acl);
when(mockSynapse.updateACL((AccessControlList) any(), eq(false)))
.thenReturn(acl);
EntityHeader bene = new EntityHeader();
bene.setId("syn999");
when(mockSynapse.getEntityBenefactor(anyString())).thenReturn(bene);
org.sagebionetworks.reflection.model.PaginatedResults<EntityHeader> batchHeaders = new org.sagebionetworks.reflection.model.PaginatedResults<EntityHeader>();
batchHeaderResults = new ArrayList<EntityHeader>();
for (int i = 0; i < 10; i++) {
EntityHeader h = new EntityHeader();
h.setId("syn" + i);
batchHeaderResults.add(h);
}
batchHeaders.setResults(batchHeaderResults);
when(mockSynapse.getEntityHeaderBatch(anyList())).thenReturn(
batchHeaders);
List<AccessRequirement> accessRequirements = new ArrayList<AccessRequirement>();
accessRequirements.add(createAccessRequirement(ACCESS_TYPE.DOWNLOAD));
int mask = ENTITY | ANNOTATIONS | PERMISSIONS | ENTITY_PATH
| HAS_CHILDREN | ACCESS_REQUIREMENTS
| UNMET_ACCESS_REQUIREMENTS;
int emptyMask = 0;
EntityBundle bundle = new EntityBundle();
bundle.setEntity(entity);
bundle.setAnnotations(annos);
bundle.setPermissions(eup);
bundle.setPath(path);
bundle.setHasChildren(false);
bundle.setAccessRequirements(accessRequirements);
bundle.setUnmetAccessRequirements(accessRequirements);
when(mockSynapse.getEntityBundle(anyString(), Matchers.eq(mask)))
.thenReturn(bundle);
when(mockSynapse.getEntityBundle(anyString(), Matchers.eq(ENTITY | ANNOTATIONS | ROOT_WIKI_ID)))
.thenReturn(bundle);
EntityBundle emptyBundle = new EntityBundle();
when(mockSynapse.getEntityBundle(anyString(), Matchers.eq(emptyMask)))
.thenReturn(emptyBundle);
when(mockSynapse.canAccess("syn101", ACCESS_TYPE.READ))
.thenReturn(true);
page = new WikiPage();
page.setId("testId");
page.setMarkdown("my markdown");
page.setParentWikiId(null);
page.setTitle("A Title");
v2Page = new V2WikiPage();
v2Page.setId("v2TestId");
v2Page.setEtag("122333");
handle = new S3FileHandle();
handle.setId("4422");
handle.setBucketName("bucket");
handle.setFileName(testFileName);
handle.setKey("key");
when(mockSynapse.getRawFileHandle(anyString())).thenReturn(handle);
when(
mockSynapse
.completeChunkFileUpload(any(CompleteChunkedFileRequest.class)))
.thenReturn(handle);
org.sagebionetworks.reflection.model.PaginatedResults<AccessRequirement> ars = new org.sagebionetworks.reflection.model.PaginatedResults<AccessRequirement>();
ars.setTotalNumberOfResults(0);
ars.setResults(new ArrayList<AccessRequirement>());
when(
mockSynapse
.getAccessRequirements(any(RestrictableObjectDescriptor.class)))
.thenReturn(ars);
when(
mockSynapse.getUnmetAccessRequirements(
any(RestrictableObjectDescriptor.class),
any(ACCESS_TYPE.class))).thenReturn(ars);
mockEvaluation = Mockito.mock(Evaluation.class);
when(mockEvaluation.getStatus()).thenReturn(EvaluationStatus.OPEN);
when(mockSynapse.getEvaluation(anyString())).thenReturn(mockEvaluation);
mockUserSessionData = Mockito.mock(UserSessionData.class);
mockUserProfile = Mockito.mock(UserProfile.class);
when(mockSynapse.getUserSessionData()).thenReturn(mockUserSessionData);
when(mockUserSessionData.getProfile()).thenReturn(mockUserProfile);
when(mockUserProfile.getOwnerId()).thenReturn(MY_USER_PROFILE_OWNER_ID);
mockParticipant = Mockito.mock(Participant.class);
when(mockSynapse.getParticipant(anyString(), anyString())).thenReturn(
mockParticipant);
when(mockSynapse.getMyProfile()).thenReturn(mockUserProfile);
when(mockSynapse.createParticipant(anyString())).thenReturn(
mockParticipant);
UploadDaemonStatus status = new UploadDaemonStatus();
String fileHandleId = "myFileHandleId";
status.setFileHandleId(fileHandleId);
status.setState(State.COMPLETED);
when(mockSynapse.getCompleteUploadDaemonStatus(anyString()))
.thenReturn(status);
status = new UploadDaemonStatus();
status.setState(State.PROCESSING);
status.setPercentComplete(.05d);
when(mockSynapse.startUploadDeamon(any(CompleteAllChunksRequest.class)))
.thenReturn(status);
PaginatedResults<MembershipInvitation> openInvites = new PaginatedResults<MembershipInvitation>();
openInvites.setTotalNumberOfResults(0);
when(
mockSynapse.getOpenMembershipInvitations(anyString(),
anyString(), anyLong(), anyLong())).thenReturn(
openInvites);
PaginatedResults<MembershipRequest> openRequests = new PaginatedResults<MembershipRequest>();
openRequests.setTotalNumberOfResults(0);
when(
mockSynapse.getOpenMembershipRequests(anyString(), anyString(),
anyLong(), anyLong())).thenReturn(openRequests);
membershipStatus = new TeamMembershipStatus();
membershipStatus.setCanJoin(false);
membershipStatus.setHasOpenInvitation(false);
membershipStatus.setHasOpenRequest(false);
membershipStatus.setHasUnmetAccessRequirement(false);
membershipStatus.setIsMember(false);
membershipStatus.setMembershipApprovalRequired(false);
when(mockSynapse.getTeamMembershipStatus(anyString(), anyString()))
.thenReturn(membershipStatus);
sentMessage = new MessageToUser();
sentMessage.setId("987");
when(
mockSynapse.sendMessage(any(MessageToUser.class))).thenReturn(sentMessage);
// getMyProjects getUserProjects
		PaginatedResults<ProjectHeader> headers = new PaginatedResults<ProjectHeader>();
		headers.setTotalNumberOfResults(1100);
		List<ProjectHeader> projectHeaders = new ArrayList<ProjectHeader>();
		projectHeaders.add(new ProjectHeader());
		headers.setResults(projectHeaders);
when(
mockSynapse.getMyProjects(any(ProjectListType.class),
any(ProjectListSortColumn.class),
any(SortDirection.class), anyInt(), anyInt()))
.thenReturn(headers);
when(
mockSynapse.getProjectsFromUser(anyLong(),
any(ProjectListSortColumn.class),
any(SortDirection.class), anyInt(), anyInt()))
.thenReturn(headers);
when(
mockSynapse.getProjectsForTeam(anyLong(),
any(ProjectListSortColumn.class),
any(SortDirection.class), anyInt(), anyInt()))
.thenReturn(headers);
testUserProfile = new UserProfile();
testUserProfile.setUserName("Test User");
when(mockSynapse.getUserProfile(eq(testUserId))).thenReturn(
testUserProfile);
joinTeamToken = new JoinTeamSignedToken();
joinTeamToken.setHmac("98765");
joinTeamToken.setMemberId("1");
joinTeamToken.setTeamId("2");
joinTeamToken.setUserId("3");
encodedJoinTeamToken = SerializationUtils.serializeAndHexEncode(joinTeamToken);
notificationSettingsToken = new NotificationSettingsSignedToken();
notificationSettingsToken.setHmac("987654");
notificationSettingsToken.setSettings(new Settings());
notificationSettingsToken.setUserId("4");
encodedNotificationSettingsToken = SerializationUtils.serializeAndHexEncode(notificationSettingsToken);
}
private AccessRequirement createAccessRequirement(ACCESS_TYPE type) {
TermsOfUseAccessRequirement accessRequirement = new TermsOfUseAccessRequirement();
accessRequirement.setConcreteType(TermsOfUseAccessRequirement.class
.getName());
RestrictableObjectDescriptor descriptor = new RestrictableObjectDescriptor();
descriptor.setId("101");
descriptor.setType(RestrictableObjectType.ENTITY);
accessRequirement.setSubjectIds(Arrays
.asList(new RestrictableObjectDescriptor[] { descriptor }));
accessRequirement.setAccessType(type);
return accessRequirement;
}
private void setupTeamInvitations() throws SynapseException {
ArrayList<MembershipInvtnSubmission> testInvitations = new ArrayList<MembershipInvtnSubmission>();
testInvitation = new MembershipInvtnSubmission();
testInvitation.setId("628319");
testInvitation.setInviteeId(inviteeUserId);
testInvitations.add(testInvitation);
PaginatedResults<MembershipInvtnSubmission> paginatedInvitations = new PaginatedResults<MembershipInvtnSubmission>();
paginatedInvitations.setResults(testInvitations);
when(
mockSynapse.getOpenMembershipInvitationSubmissions(anyString(),
anyString(), anyLong(), anyLong())).thenReturn(
paginatedInvitations);
inviteeUserProfile = new UserProfile();
inviteeUserProfile.setUserName("Invitee User");
inviteeUserProfile.setOwnerId(inviteeUserId);
when(mockSynapse.getUserProfile(eq(inviteeUserId))).thenReturn(
inviteeUserProfile);
}
@Test
public void testGetEntityBundleAll() throws RestServiceException {
		// Make sure we can get all parts of the bundle
int mask = ENTITY | ANNOTATIONS | PERMISSIONS | ENTITY_PATH
| HAS_CHILDREN | ACCESS_REQUIREMENTS
| UNMET_ACCESS_REQUIREMENTS;
EntityBundle bundle = synapseClient.getEntityBundle(entityId, mask);
assertNotNull(bundle);
		// All of the requested parts should be present
assertNotNull(bundle.getEntity());
assertNotNull(bundle.getAnnotations());
assertNotNull(bundle.getPath());
assertNotNull(bundle.getPermissions());
assertNotNull(bundle.getHasChildren());
assertNotNull(bundle.getAccessRequirements());
assertNotNull(bundle.getUnmetAccessRequirements());
}
@Test
public void testGetEntityBundleNone() throws RestServiceException {
// Make sure all are null
int mask = 0x0;
EntityBundle bundle = synapseClient.getEntityBundle(entityId, mask);
assertNotNull(bundle);
		// None of the parts should be present
assertNull(bundle.getEntity());
assertNull(bundle.getAnnotations());
assertNull(bundle.getPath());
assertNull(bundle.getPermissions());
assertNull(bundle.getHasChildren());
assertNull(bundle.getAccessRequirements());
assertNull(bundle.getUnmetAccessRequirements());
}
@Test(expected = IllegalArgumentException.class)
public void testParseEntityFromJsonNoType()
throws JSONObjectAdapterException {
ExampleEntity example = new ExampleEntity();
example.setName("some name");
example.setDescription("some description");
// do not set the type
String json = EntityFactory.createJSONStringForEntity(example);
// This will fail as the type is required
synapseClient.parseEntityFromJson(json);
}
@Test
public void testParseEntityFromJson() throws JSONObjectAdapterException {
ExampleEntity example = new ExampleEntity();
example.setName("some name");
example.setDescription("some description");
example.setEntityType(ExampleEntity.class.getName());
String json = EntityFactory.createJSONStringForEntity(example);
// Now make sure this can be read back
ExampleEntity clone = (ExampleEntity) synapseClient
.parseEntityFromJson(json);
assertEquals(example, clone);
}
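// createOrUpdateEntity's boolean argument selects create (true) vs. update
// (false); the next three tests exercise update, create, and create with
// annotations.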
@Test
public void testCreateOrUpdateEntityFalse()
throws JSONObjectAdapterException, RestServiceException,
SynapseException {
ExampleEntity in = new ExampleEntity();
in.setName("some name");
in.setDescription("some description");
in.setEntityType(ExampleEntity.class.getName());
ExampleEntity out = new ExampleEntity();
out.setName("some name");
out.setDescription("some description");
out.setEntityType(ExampleEntity.class.getName());
out.setId("syn123");
out.setEtag("45");
// when 'in' is passed in, return 'out'.
when(mockSynapse.putEntity(in)).thenReturn(out);
String result = synapseClient.createOrUpdateEntity(in, null, false);
assertEquals(out.getId(), result);
verify(mockSynapse).putEntity(in);
}
@Test
public void testCreateOrUpdateEntityTrue()
throws JSONObjectAdapterException, RestServiceException,
SynapseException {
ExampleEntity in = new ExampleEntity();
in.setName("some name");
in.setDescription("some description");
in.setEntityType(ExampleEntity.class.getName());
ExampleEntity out = new ExampleEntity();
out.setName("some name");
out.setDescription("some description");
out.setEntityType(ExampleEntity.class.getName());
out.setId("syn123");
out.setEtag("45");
// when 'in' is passed in, return 'out'.
when(mockSynapse.createEntity(in)).thenReturn(out);
String result = synapseClient.createOrUpdateEntity(in, null, true);
assertEquals(out.getId(), result);
verify(mockSynapse).createEntity(in);
}
@Test
public void testCreateOrUpdateEntityTrueWithAnnos()
throws JSONObjectAdapterException, RestServiceException,
SynapseException {
ExampleEntity in = new ExampleEntity();
in.setName("some name");
in.setDescription("some description");
in.setEntityType(ExampleEntity.class.getName());
Annotations annos = new Annotations();
annos.addAnnotation("someString", "one");
ExampleEntity out = new ExampleEntity();
out.setName("some name");
out.setDescription("some description");
out.setEntityType(ExampleEntity.class.getName());
out.setId("syn123");
out.setEtag("45");
// when 'in' is passed in, return 'out'.
when(mockSynapse.createEntity(in)).thenReturn(out);
String result = synapseClient.createOrUpdateEntity(in, annos, true);
assertEquals(out.getId(), result);
verify(mockSynapse).createEntity(in);
annos.setEtag(out.getEtag());
annos.setId(out.getId());
verify(mockSynapse).updateAnnotations(out.getId(), annos);
}
@Test
public void testGetEntityBenefactorAcl() throws Exception {
EntityBundle bundle = new EntityBundle();
bundle.setBenefactorAcl(acl);
when(mockSynapse.getEntityBundle("syn101", EntityBundle.BENEFACTOR_ACL))
.thenReturn(bundle);
AccessControlList clone = synapseClient
.getEntityBenefactorAcl("syn101");
assertEquals(acl, clone);
}
@Test
public void testCreateAcl() throws Exception {
AccessControlList clone = synapseClient.createAcl(acl);
assertEquals(acl, clone);
}
@Test
public void testUpdateAcl() throws Exception {
AccessControlList clone = synapseClient.updateAcl(acl);
assertEquals(acl, clone);
}
@Test
public void testUpdateAclRecursive() throws Exception {
AccessControlList clone = synapseClient.updateAcl(acl, true);
assertEquals(acl, clone);
verify(mockSynapse).updateACL(any(AccessControlList.class), eq(true));
}
@Test
public void testDeleteAcl() throws Exception {
EntityBundle bundle = new EntityBundle();
bundle.setBenefactorAcl(acl);
when(mockSynapse.getEntityBundle("syn101", EntityBundle.BENEFACTOR_ACL))
.thenReturn(bundle);
AccessControlList clone = synapseClient.deleteAcl("syn101");
assertEquals(acl, clone);
}
@Test
public void testHasAccess() throws Exception {
assertTrue(synapseClient.hasAccess("syn101", "READ"));
}
@Test
public void testGetUserProfile() throws Exception {
// verify the call goes directly to the synapse client provider
String testRepoUrl = "http://mytestrepourl";
when(mockUrlProvider.getRepositoryServiceUrl()).thenReturn(testRepoUrl);
UserProfile userProfile = synapseClient.getUserProfile(testUserId);
assertEquals(userProfile, testUserProfile);
}
@Test
public void testGetProjectById() throws Exception {
String projectId = "syn1029";
Project project = new Project();
project.setId(projectId);
when(mockSynapse.getEntityById(projectId)).thenReturn(project);
Project actualProject = synapseClient.getProject(projectId);
assertEquals(project, actualProject);
}
@Test
public void testGetJSONEntity() throws Exception {
JSONObject json = EntityFactory.createJSONObjectForEntity(entity);
Mockito.when(mockSynapse.getEntity(anyString())).thenReturn(json);
String testRepoUri = "/testservice";
synapseClient.getJSONEntity(testRepoUri);
// verify that this call uses Synapse.getEntity(testRepoUri)
verify(mockSynapse).getEntity(testRepoUri);
}
@Test
public void testGetWikiHeaderTree() throws Exception {
PaginatedResults<WikiHeader> headerTreeResults = new PaginatedResults<WikiHeader>();
when(mockSynapse.getWikiHeaderTree(anyString(), any(ObjectType.class)))
.thenReturn(headerTreeResults);
synapseClient.getWikiHeaderTree("testId", ObjectType.ENTITY.toString());
verify(mockSynapse).getWikiHeaderTree(anyString(),
any(ObjectType.class));
}
@Test
public void testGetWikiAttachmentHandles() throws Exception {
FileHandleResults testResults = new FileHandleResults();
Mockito.when(
mockSynapse
.getWikiAttachmenthHandles(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class)))
.thenReturn(testResults);
synapseClient.getWikiAttachmentHandles(new WikiPageKey("syn123",
ObjectType.ENTITY.toString(), "20"));
verify(mockSynapse).getWikiAttachmenthHandles(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
}
@Test
public void testDeleteV2WikiPage() throws Exception {
synapseClient.deleteV2WikiPage(new WikiPageKey("syn123",
ObjectType.ENTITY.toString(), "20"));
verify(mockSynapse).deleteV2WikiPage(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
}
@Test
public void testGetV2WikiPage() throws Exception {
Mockito.when(
mockSynapse
.getV2WikiPage(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class)))
.thenReturn(v2Page);
synapseClient.getV2WikiPage(new WikiPageKey("syn123", ObjectType.ENTITY
.toString(), "20"));
verify(mockSynapse).getV2WikiPage(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
Mockito.when(
mockSynapse
.getVersionOfV2WikiPage(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
any(Long.class))).thenReturn(v2Page);
synapseClient.getVersionOfV2WikiPage(new WikiPageKey("syn123",
ObjectType.ENTITY.toString(), "20"), new Long(0));
verify(mockSynapse).getVersionOfV2WikiPage(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
any(Long.class));
}
@Test
public void testUpdateV2WikiPage() throws Exception {
Mockito.when(
mockSynapse.updateV2WikiPage(anyString(),
any(ObjectType.class), any(V2WikiPage.class)))
.thenReturn(v2Page);
synapseClient.updateV2WikiPage("testId", ObjectType.ENTITY.toString(),
v2Page);
verify(mockSynapse).updateV2WikiPage(anyString(),
any(ObjectType.class), any(V2WikiPage.class));
}
@Test
public void testRestoreV2WikiPage() throws Exception {
String wikiId = "syn123";
Mockito.when(
mockSynapse.restoreV2WikiPage(anyString(),
any(ObjectType.class), any(String.class), anyLong()))
.thenReturn(v2Page);
synapseClient.restoreV2WikiPage("ownerId",
ObjectType.ENTITY.toString(), wikiId, new Long(2));
verify(mockSynapse).restoreV2WikiPage(anyString(),
any(ObjectType.class), any(String.class), anyLong());
}
@Test
public void testGetV2WikiHeaderTree() throws Exception {
PaginatedResults<V2WikiHeader> headerTreeResults = new PaginatedResults<V2WikiHeader>();
when(
mockSynapse.getV2WikiHeaderTree(anyString(),
any(ObjectType.class))).thenReturn(headerTreeResults);
synapseClient.getV2WikiHeaderTree("testId",
ObjectType.ENTITY.toString());
verify(mockSynapse).getV2WikiHeaderTree(anyString(),
any(ObjectType.class));
}
@Test
public void testGetV2WikiOrderHint() throws Exception {
V2WikiOrderHint orderHint = new V2WikiOrderHint();
when(
mockSynapse
.getV2OrderHint(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class)))
.thenReturn(orderHint);
synapseClient.getV2WikiOrderHint(new WikiPageKey("syn123",
ObjectType.ENTITY.toString(), "20"));
verify(mockSynapse).getV2OrderHint(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
}
@Test
public void testUpdateV2WikiOrderHint() throws Exception {
V2WikiOrderHint orderHint = new V2WikiOrderHint();
when(mockSynapse.updateV2WikiOrderHint(any(V2WikiOrderHint.class)))
.thenReturn(orderHint);
synapseClient.updateV2WikiOrderHint(orderHint);
verify(mockSynapse).updateV2WikiOrderHint(any(V2WikiOrderHint.class));
}
@Test
public void testGetV2WikiHistory() throws Exception {
PaginatedResults<V2WikiHistorySnapshot> historyResults = new PaginatedResults<V2WikiHistorySnapshot>();
when(
mockSynapse
.getV2WikiHistory(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
any(Long.class), any(Long.class))).thenReturn(
historyResults);
synapseClient.getV2WikiHistory(new WikiPageKey("syn123",
ObjectType.ENTITY.toString(), "20"), new Long(10), new Long(0));
verify(mockSynapse).getV2WikiHistory(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
any(Long.class), any(Long.class));
}
@Test
public void testGetV2WikiAttachmentHandles() throws Exception {
FileHandleResults testResults = new FileHandleResults();
Mockito.when(
mockSynapse
.getV2WikiAttachmentHandles(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class)))
.thenReturn(testResults);
synapseClient.getV2WikiAttachmentHandles(new WikiPageKey("syn123",
ObjectType.ENTITY.toString(), "20"));
verify(mockSynapse).getV2WikiAttachmentHandles(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
Mockito.when(
mockSynapse
.getVersionOfV2WikiAttachmentHandles(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
any(Long.class))).thenReturn(testResults);
synapseClient.getVersionOfV2WikiAttachmentHandles(new WikiPageKey(
"syn123", ObjectType.ENTITY.toString(), "20"), new Long(0));
verify(mockSynapse).getVersionOfV2WikiAttachmentHandles(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
any(Long.class));
}
@Test
public void testZipAndUpload() throws IOException, RestServiceException,
JSONObjectAdapterException, SynapseException {
Mockito.when(
mockSynapse
.createFileHandle(any(File.class), any(String.class)))
.thenReturn(handle);
synapseClient.zipAndUploadFile("markdown", "fileName");
verify(mockSynapse)
.createFileHandle(any(File.class), any(String.class));
}
@Test
public void testGetMarkdown() throws IOException, RestServiceException,
SynapseException {
String someMarkDown = "someMarkDown";
Mockito.when(
mockSynapse
.downloadV2WikiMarkdown(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class)))
.thenReturn(someMarkDown);
synapseClient.getMarkdown(new WikiPageKey("syn123", ObjectType.ENTITY
.toString(), "20"));
verify(mockSynapse).downloadV2WikiMarkdown(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
Mockito.when(
mockSynapse
.downloadVersionOfV2WikiMarkdown(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
any(Long.class))).thenReturn(someMarkDown);
synapseClient.getVersionOfMarkdown(new WikiPageKey("syn123",
ObjectType.ENTITY.toString(), "20"), new Long(0));
verify(mockSynapse).downloadVersionOfV2WikiMarkdown(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
any(Long.class));
}
@Test
public void testCreateV2WikiPageWithV1() throws Exception {
Mockito.when(
mockSynapse.createWikiPage(anyString(), any(ObjectType.class),
any(WikiPage.class))).thenReturn(page);
synapseClient.createV2WikiPageWithV1("testId",
ObjectType.ENTITY.toString(), page);
verify(mockSynapse).createWikiPage(anyString(), any(ObjectType.class),
any(WikiPage.class));
}
@Test
public void testUpdateV2WikiPageWithV1() throws Exception {
Mockito.when(
mockSynapse.updateWikiPage(anyString(), any(ObjectType.class),
any(WikiPage.class))).thenReturn(page);
synapseClient.updateV2WikiPageWithV1("testId",
ObjectType.ENTITY.toString(), page);
verify(mockSynapse).updateWikiPage(anyString(), any(ObjectType.class),
any(WikiPage.class));
}
@Test
public void getV2WikiPageAsV1() throws Exception {
Mockito.when(
mockSynapse
.getWikiPage(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class)))
.thenReturn(page);
Mockito.when(
mockSynapse
.getV2WikiPage(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class)))
.thenReturn(v2Page);
synapseClient.getV2WikiPageAsV1(new WikiPageKey("syn123",
ObjectType.ENTITY.toString(), "20"));
verify(mockSynapse).getWikiPage(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
// asking for the same page twice should result in a cache hit, and it
// should not ask for it from the synapse client
synapseClient.getV2WikiPageAsV1(new WikiPageKey("syn123",
ObjectType.ENTITY.toString(), "20"));
verify(mockSynapse, Mockito.times(1)).getWikiPage(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
Mockito.when(
mockSynapse
.getWikiPageForVersion(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
any(Long.class))).thenReturn(page);
Mockito.when(
mockSynapse
.getVersionOfV2WikiPage(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
anyLong())).thenReturn(v2Page);
synapseClient.getVersionOfV2WikiPageAsV1(new WikiPageKey("syn123",
ObjectType.ENTITY.toString(), "20"), new Long(0));
verify(mockSynapse).getWikiPageForVersion(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
any(Long.class));
// asking for the same page twice should result in a cache hit, and it
// should not ask for it from the synapse client
synapseClient.getVersionOfV2WikiPageAsV1(new WikiPageKey("syn123",
ObjectType.ENTITY.toString(), "20"), new Long(0));
verify(mockSynapse, Mockito.times(1)).getWikiPageForVersion(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
any(Long.class));
}
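// Helper: resets mockSynapse and re-stubs getEntityById,
// createExternalFileHandle, and putEntity for the external file handle tests.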
private void resetUpdateExternalFileHandleMocks(String testId,
FileEntity file, ExternalFileHandle handle)
throws SynapseException, JSONObjectAdapterException {
reset(mockSynapse);
when(mockSynapse.getEntityById(testId)).thenReturn(file);
when(
mockSynapse
.createExternalFileHandle(any(ExternalFileHandle.class)))
.thenReturn(handle);
when(mockSynapse.putEntity(any(FileEntity.class))).thenReturn(file);
}
@Test
public void testUpdateExternalFileHandle() throws Exception {
// verify the call goes directly to the synapse client provider, and that
// it tries to rename the entity to the filename
String myFileName = "testFileName.csv";
String testUrl = "http://mytesturl/" + myFileName;
String testId = "myTestId";
FileEntity file = new FileEntity();
String originalFileEntityName = "syn1223";
file.setName(originalFileEntityName);
file.setId(testId);
file.setDataFileHandleId("handle1");
ExternalFileHandle handle = new ExternalFileHandle();
handle.setExternalURL(testUrl);
resetUpdateExternalFileHandleMocks(testId, file, handle);
ArgumentCaptor<FileEntity> arg = ArgumentCaptor
.forClass(FileEntity.class);
synapseClient.updateExternalFile(testId, testUrl, null, storageLocationId);
verify(mockSynapse).getEntityById(testId);
verify(mockSynapse).createExternalFileHandle(
any(ExternalFileHandle.class));
verify(mockSynapse, Mockito.times(2)).putEntity(arg.capture());
// verify rename
FileEntity fileEntityArg = arg.getValue(); // last value captured
assertEquals(myFileName, fileEntityArg.getName());
// and if rename fails, verify all is well (but the FileEntity name is
// not updated)
resetUpdateExternalFileHandleMocks(testId, file, handle);
file.setName(originalFileEntityName);
// first call should return file, second call to putEntity should throw
// an exception
when(mockSynapse.putEntity(any(FileEntity.class))).thenReturn(file)
.thenThrow(
new IllegalArgumentException(
"invalid name for some reason"));
synapseClient.updateExternalFile(testId, testUrl, "", storageLocationId);
// called createExternalFileHandle
verify(mockSynapse).createExternalFileHandle(
any(ExternalFileHandle.class));
// and it should have called putEntity 2 additional times
verify(mockSynapse, Mockito.times(2)).putEntity(arg.capture());
fileEntityArg = arg.getValue(); // last value captured
assertEquals(originalFileEntityName, fileEntityArg.getName());
// and (finally) verify the correct name if it is explicitly set
resetUpdateExternalFileHandleMocks(testId, file, handle);
String newName = "a new name";
synapseClient.updateExternalFile(testId, testUrl, newName, storageLocationId);
file.setName(newName);
verify(mockSynapse).putEntity(eq(file)); // should equal the previous file but with the new name
}
@Test
public void testCreateExternalFile() throws Exception {
// test setting file handle name
String parentEntityId = "syn123333";
String externalUrl = "sftp://foobar.edu/b/test.txt";
String fileName = "testing.txt";
when(
mockSynapse
.createExternalFileHandle(any(ExternalFileHandle.class)))
.thenReturn(new ExternalFileHandle());
when(mockSynapse.createEntity(any(FileEntity.class))).thenReturn(
new FileEntity());
synapseClient.createExternalFile(parentEntityId, externalUrl, fileName, storageLocationId);
ArgumentCaptor<ExternalFileHandle> captor = ArgumentCaptor
.forClass(ExternalFileHandle.class);
verify(mockSynapse).createExternalFileHandle(captor.capture());
ExternalFileHandle handle = captor.getValue();
// verify name is set
assertEquals(fileName, handle.getFileName());
assertEquals(externalUrl, handle.getExternalURL());
assertEquals(storageLocationId, handle.getStorageLocationId());
}
@Test
public void testGetEntityDoi() throws Exception {
// wiring test
Doi testDoi = new Doi();
testDoi.setDoiStatus(DoiStatus.CREATED);
testDoi.setId("test doi id");
testDoi.setCreatedBy("Test User");
testDoi.setCreatedOn(new Date());
testDoi.setObjectId("syn1234");
Mockito.when(mockSynapse.getEntityDoi(anyString(), anyLong()))
.thenReturn(testDoi);
synapseClient.getEntityDoi("test entity id", null);
verify(mockSynapse).getEntityDoi(anyString(), anyLong());
}
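// Helper: a FileEntity (id "5544") named testFileName, shared by the upload
// tests below.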
private FileEntity getTestFileEntity() {
FileEntity testFileEntity = new FileEntity();
testFileEntity.setId("5544");
testFileEntity.setName(testFileName);
return testFileEntity;
}
@Test(expected = NotFoundException.class)
public void testGetEntityDoiNotFound() throws Exception {
// wiring test
Mockito.when(mockSynapse.getEntityDoi(anyString(), anyLong()))
.thenThrow(new SynapseNotFoundException());
synapseClient.getEntityDoi("test entity id", null);
}
@Test
public void testCreateDoi() throws Exception {
// wiring test
synapseClient.createDoi("test entity id", null);
verify(mockSynapse).createEntityDoi(anyString(), anyLong());
}
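// Helper: builds a one-element ChunkRequest list (chunk number 1, token key
// "test key") used by the chunked file upload tests.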
private List<ChunkRequest> getTestChunkRequestJson()
throws JSONObjectAdapterException {
ChunkRequest chunkRequest = new ChunkRequest();
ChunkedFileToken token = new ChunkedFileToken();
token.setKey("test key");
chunkRequest.setChunkedFileToken(token);
chunkRequest.setChunkNumber(1L);
List<ChunkRequest> chunkRequests = new ArrayList<ChunkRequest>();
chunkRequests.add(chunkRequest);
return chunkRequests;
}
@Test
public void testCombineChunkedFileUpload()
throws JSONObjectAdapterException, SynapseException,
RestServiceException {
List<ChunkRequest> chunkRequests = getTestChunkRequestJson();
synapseClient.combineChunkedFileUpload(chunkRequests);
verify(mockSynapse).startUploadDeamon(
any(CompleteAllChunksRequest.class));
}
@Test
public void testGetUploadDaemonStatus() throws JSONObjectAdapterException,
SynapseException, RestServiceException {
synapseClient.getUploadDaemonStatus("daemonId");
verify(mockSynapse).getCompleteUploadDaemonStatus(anyString());
}
/**
* Direct upload tests. Most of the methods are simple pass-throughs to the
* Java Synapse client, but completeUpload has additional logic
*
* @throws JSONObjectAdapterException
* @throws SynapseException
* @throws RestServiceException
*/
@Test
public void testCompleteUpload() throws JSONObjectAdapterException,
SynapseException, RestServiceException {
FileEntity testFileEntity = getTestFileEntity();
when(mockSynapse.createEntity(any(FileEntity.class))).thenReturn(
testFileEntity);
when(mockSynapse.putEntity(any(FileEntity.class))).thenReturn(
testFileEntity);
// parent entity has no immediate children
EntityIdList childEntities = new EntityIdList();
childEntities.setIdList(new ArrayList());
when(
mockSynapse.getDescendants(anyString(), anyInt(), anyInt(),
anyString())).thenReturn(childEntities);
synapseClient.setFileEntityFileHandle(null, null, "parentEntityId");
// it should have tried to create a new entity (since entity id was
// null)
verify(mockSynapse).createEntity(any(FileEntity.class));
}
@Test(expected = NotFoundException.class)
public void testGetFileEntityIdWithSameNameNotFound()
throws JSONObjectAdapterException, SynapseException,
RestServiceException, JSONException {
JSONObject queryResult = new JSONObject();
queryResult.put("totalNumberOfResults", (long) 0);
when(mockSynapse.query(anyString())).thenReturn(queryResult); // query returns zero results
synapseClient.getFileEntityIdWithSameName(testFileName, "parentEntityId");
}
@Test(expected = ConflictException.class)
public void testGetFileEntityIdWithSameNameConflict()
throws JSONObjectAdapterException, SynapseException,
RestServiceException, JSONException {
Folder folder = new Folder();
folder.setName(testFileName);
JSONObject queryResult = new JSONObject();
JSONArray results = new JSONArray();
// Set up results.
JSONObject objectResult = EntityFactory
.createJSONObjectForEntity(folder);
JSONArray typeArray = new JSONArray();
typeArray.put("Folder");
objectResult.put("entity.concreteType", typeArray);
results.put(objectResult);
// Set up query result.
queryResult.put("totalNumberOfResults", (long) 1);
queryResult.put("results", results);
// Have results returned in query.
when(mockSynapse.query(anyString())).thenReturn(queryResult);
synapseClient.getFileEntityIdWithSameName(testFileName, "parentEntityId");
}
@Test
public void testGetFileEntityIdWithSameNameFound() throws JSONException,
JSONObjectAdapterException, SynapseException, RestServiceException {
FileEntity file = getTestFileEntity();
JSONObject queryResult = new JSONObject();
JSONArray results = new JSONArray();
// Set up results.
JSONObject objectResult = EntityFactory.createJSONObjectForEntity(file);
JSONArray typeArray = new JSONArray();
typeArray.put(FileEntity.class.getName());
objectResult.put("entity.concreteType", typeArray);
objectResult.put("entity.id", file.getId());
results.put(objectResult);
queryResult.put("totalNumberOfResults", (long) 1);
queryResult.put("results", results);
// Have results returned in query.
when(mockSynapse.query(anyString())).thenReturn(queryResult);
String fileEntityId = synapseClient.getFileEntityIdWithSameName(
testFileName, "parentEntityId");
assertEquals(fileEntityId, file.getId());
}
@Test
public void testCompleteChunkedFileUploadExistingEntity()
throws JSONObjectAdapterException, SynapseException,
RestServiceException {
List<ChunkRequest> chunkRequests = getTestChunkRequestJson();
FileEntity testFileEntity = getTestFileEntity();
when(mockSynapse.getEntityById(anyString())).thenReturn(testFileEntity);
when(mockSynapse.createEntity(any(FileEntity.class))).thenThrow(
new AssertionError("No need to create a new entity!"));
when(mockSynapse.putEntity(any(FileEntity.class))).thenReturn(
testFileEntity);
synapseClient.setFileEntityFileHandle(null, entityId, "parentEntityId");
// it should have tried to find the entity
verify(mockSynapse).getEntityById(anyString());
// update the data file handle id
verify(mockSynapse, Mockito.times(1)).putEntity(any(FileEntity.class));
}
@Test
public void testGetChunkedFileToken() throws SynapseException,
RestServiceException, JSONObjectAdapterException {
String fileName = "test file.zip";
String contentType = "application/test";
String md5 = "0123456789abcdef";
ChunkedFileToken testToken = new ChunkedFileToken();
testToken.setFileName(fileName);
testToken.setKey("a key 42");
testToken.setUploadId("upload ID 123");
testToken.setContentMD5(md5);
testToken.setStorageLocationId(storageLocationId);
when(
mockSynapse
.createChunkedFileUploadToken(any(CreateChunkedFileTokenRequest.class)))
.thenReturn(testToken);
ChunkedFileToken token = synapseClient.getChunkedFileToken(fileName,
contentType, md5, storageLocationId);
verify(mockSynapse).createChunkedFileUploadToken(
any(CreateChunkedFileTokenRequest.class));
assertEquals(testToken, token);
}
@Test
public void testGetChunkedPresignedUrl() throws SynapseException,
RestServiceException, MalformedURLException,
JSONObjectAdapterException {
URL testUrl = new URL("http://test.presignedurl.com/foo");
when(mockSynapse.createChunkedPresignedUrl(any(ChunkRequest.class)))
.thenReturn(testUrl);
String presignedUrl = synapseClient
.getChunkedPresignedUrl(getTestChunkRequestJson().get(0));
verify(mockSynapse).createChunkedPresignedUrl(any(ChunkRequest.class));
assertEquals(testUrl.toString(), presignedUrl);
}
@Test
public void testInviteMemberOpenInvitations() throws SynapseException,
RestServiceException, JSONObjectAdapterException {
membershipStatus.setHasOpenInvitation(true);
// verify it does not create a new invitation since one is already open
synapseClient.inviteMember("123", "a team", "", "");
verify(mockSynapse, Mockito.times(0)).addTeamMember(anyString(),
anyString(), anyString(), anyString());
verify(mockSynapse, Mockito.times(0)).createMembershipInvitation(
any(MembershipInvtnSubmission.class), anyString(), anyString());
}
@Test
public void testRequestMemberOpenRequests() throws SynapseException,
RestServiceException, JSONObjectAdapterException {
membershipStatus.setHasOpenRequest(true);
// verify it does not create a new request since one is already open
synapseClient.requestMembership("123", "a team", "", TEST_HOME_PAGE_BASE);
verify(mockSynapse, Mockito.times(0)).addTeamMember(anyString(),
anyString(), eq(TEST_HOME_PAGE_BASE+"#!Team:"), eq(TEST_HOME_PAGE_BASE+"#!SignedToken:Settings/"));
verify(mockSynapse, Mockito.times(0)).createMembershipRequest(
any(MembershipRqstSubmission.class), anyString(), anyString());
}
@Test
public void testInviteMemberCanJoin() throws SynapseException,
RestServiceException, JSONObjectAdapterException {
membershipStatus.setCanJoin(true);
synapseClient.inviteMember("123", "a team", "", TEST_HOME_PAGE_BASE);
verify(mockSynapse).addTeamMember(anyString(), anyString(), eq(TEST_HOME_PAGE_BASE+"#!Team:"), eq(TEST_HOME_PAGE_BASE+"#!SignedToken:Settings/"));
}
@Test
public void testRequestMembershipCanJoin() throws SynapseException,
RestServiceException, JSONObjectAdapterException {
membershipStatus.setCanJoin(true);
synapseClient.requestMembership("123", "a team", "", TEST_HOME_PAGE_BASE);
verify(mockSynapse).addTeamMember(anyString(), anyString(), eq(TEST_HOME_PAGE_BASE+"#!Team:"), eq(TEST_HOME_PAGE_BASE+"#!SignedToken:Settings/"));
}
@Test
public void testInviteMember() throws SynapseException,
RestServiceException, JSONObjectAdapterException {
synapseClient.inviteMember("123", "a team", "", TEST_HOME_PAGE_BASE);
verify(mockSynapse).createMembershipInvitation(
any(MembershipInvtnSubmission.class), eq(TEST_HOME_PAGE_BASE+"#!SignedToken:JoinTeam/"), eq(TEST_HOME_PAGE_BASE+"#!SignedToken:Settings/"));
}
@Test
public void testRequestMembership() throws SynapseException,
RestServiceException, JSONObjectAdapterException {
synapseClient.requestMembership("123", "a team", "", TEST_HOME_PAGE_BASE);
verify(mockSynapse).createMembershipRequest(
any(MembershipRqstSubmission.class), eq(TEST_HOME_PAGE_BASE+"#!SignedToken:JoinTeam/"), eq(TEST_HOME_PAGE_BASE+"#!SignedToken:Settings/"));
}
@Test
public void testGetOpenRequestCountUnauthorized() throws SynapseException,
RestServiceException {
// is not an admin
TeamMember testTeamMember = new TeamMember();
testTeamMember.setIsAdmin(false);
when(mockSynapse.getTeamMember(anyString(), anyString())).thenReturn(
testTeamMember);
Long count = synapseClient.getOpenRequestCount("myUserId", "myTeamId");
// should never ask for open request count
verify(mockSynapse, Mockito.never()).getOpenMembershipRequests(
anyString(), anyString(), anyLong(), anyLong());
assertNull(count);
}
@Test
public void testGetOpenRequestCount() throws SynapseException,
RestServiceException, MalformedURLException,
JSONObjectAdapterException {
// is admin
TeamMember testTeamMember = new TeamMember();
testTeamMember.setIsAdmin(true);
when(mockSynapse.getTeamMember(anyString(), anyString())).thenReturn(
testTeamMember);
Long testCount = 42L;
PaginatedResults<MembershipRequest> testOpenRequests = new PaginatedResults<MembershipRequest>();
testOpenRequests.setTotalNumberOfResults(testCount);
when(
mockSynapse.getOpenMembershipRequests(anyString(), anyString(),
anyLong(), anyLong())).thenReturn(testOpenRequests);
Long count = synapseClient.getOpenRequestCount("myUserId", "myTeamId");
verify(mockSynapse, Mockito.times(1)).getOpenMembershipRequests(
anyString(), anyString(), anyLong(), anyLong());
assertEquals(testCount, count);
}
@Test
public void testGetOpenTeamInvitations() throws SynapseException,
RestServiceException, JSONObjectAdapterException {
setupTeamInvitations();
int limit = 55;
int offset = 2;
String teamId = "132";
List<OpenTeamInvitationBundle> invitationBundles = synapseClient
.getOpenTeamInvitations(teamId, limit, offset);
verify(mockSynapse).getOpenMembershipInvitationSubmissions(eq(teamId),
anyString(), eq((long) limit), eq((long) offset));
// we set this up so that a single invite would be returned. Verify that
// it is the one we're looking for
assertEquals(1, invitationBundles.size());
OpenTeamInvitationBundle invitationBundle = invitationBundles.get(0);
assertEquals(inviteeUserProfile, invitationBundle.getUserProfile());
assertEquals(testInvitation, invitationBundle.getMembershipInvtnSubmission());
}
@Test
public void testGetTeamBundle() throws SynapseException,
RestServiceException, MalformedURLException,
JSONObjectAdapterException {
// set team member count
Long testMemberCount = 111L;
PaginatedResults<TeamMember> allMembers = new PaginatedResults<TeamMember>();
allMembers.setTotalNumberOfResults(testMemberCount);
when(
mockSynapse.getTeamMembers(anyString(), anyString(), anyLong(),
anyLong())).thenReturn(allMembers);
// set team
Team team = new Team();
team.setId("test team id");
when(mockSynapse.getTeam(anyString())).thenReturn(team);
// is member
TeamMembershipStatus membershipStatus = new TeamMembershipStatus();
membershipStatus.setIsMember(true);
when(mockSynapse.getTeamMembershipStatus(anyString(), anyString()))
.thenReturn(membershipStatus);
// is admin
TeamMember testTeamMember = new TeamMember();
boolean isAdmin = true;
testTeamMember.setIsAdmin(isAdmin);
when(mockSynapse.getTeamMember(anyString(), anyString())).thenReturn(
testTeamMember);
// make the call
TeamBundle bundle = synapseClient.getTeamBundle("myUserId", "myTeamId",
true);
// now verify all values were returned in the bundle (based on the
// mocked service calls)
assertEquals(team, bundle.getTeam());
assertEquals(membershipStatus, bundle.getTeamMembershipStatus());
assertEquals(isAdmin, bundle.isUserAdmin());
assertEquals(testMemberCount, bundle.getTotalMemberCount());
}
@Test
public void testGetTeamMembers() throws SynapseException,
RestServiceException, MalformedURLException,
JSONObjectAdapterException {
// set team member count
Long testMemberCount = 111L;
PaginatedResults<TeamMember> allMembers = new PaginatedResults<TeamMember>();
allMembers.setTotalNumberOfResults(testMemberCount);
List<TeamMember> members = new ArrayList<TeamMember>();
TeamMember member1 = new TeamMember();
member1.setIsAdmin(true);
UserGroupHeader header1 = new UserGroupHeader();
Long member1Id = 123L;
header1.setOwnerId(member1Id + "");
member1.setMember(header1);
members.add(member1);
TeamMember member2 = new TeamMember();
member2.setIsAdmin(false);
UserGroupHeader header2 = new UserGroupHeader();
Long member2Id = 456L;
header2.setOwnerId(member2Id + "");
member2.setMember(header2);
members.add(member2);
allMembers.setResults(members);
when(
mockSynapse.getTeamMembers(anyString(), anyString(), anyLong(),
anyLong())).thenReturn(allMembers);
List<UserProfile> profiles = new ArrayList<UserProfile>();
UserProfile profile1 = new UserProfile();
profile1.setOwnerId(member1Id + "");
UserProfile profile2 = new UserProfile();
profile2.setOwnerId(member2Id + "");
profiles.add(profile1);
profiles.add(profile2);
when(mockSynapse.listUserProfiles(anyList())).thenReturn(profiles);
// make the call
TeamMemberPagedResults results = synapseClient.getTeamMembers(
"myTeamId", "search term", 100, 0);
// verify it results in the two team member bundles that we expect
List<TeamMemberBundle> memberBundles = results.getResults();
assertEquals(2, memberBundles.size());
TeamMemberBundle bundle1 = memberBundles.get(0);
assertTrue(bundle1.getIsTeamAdmin());
assertEquals(profile1, bundle1.getUserProfile());
TeamMemberBundle bundle2 = memberBundles.get(1);
assertFalse(bundle2.getIsTeamAdmin());
assertEquals(profile2, bundle2.getUserProfile());
}
@Test
public void testGetEntityHeaderBatch() throws SynapseException,
RestServiceException, MalformedURLException,
JSONObjectAdapterException {
List<EntityHeader> headers = synapseClient
.getEntityHeaderBatch(new ArrayList());
// in the setup, we told the mockSynapse.getEntityHeaderBatch to return
// batchHeaderResults
for (int i = 0; i < batchHeaderResults.size(); i++) {
assertEquals(batchHeaderResults.get(i), headers.get(i));
}
}
@Test
public void testSendMessage() throws SynapseException,
RestServiceException, JSONObjectAdapterException {
ArgumentCaptor<MessageToUser> arg = ArgumentCaptor
.forClass(MessageToUser.class);
Set<String> recipients = new HashSet<String>();
recipients.add("333");
String subject = "The Mathematics of Quantum Neutrino Fields";
String messageBody = "Atoms are not to be trusted, they make up everything";
String hostPageBaseURL = "http://localhost/Portal.html";
synapseClient.sendMessage(recipients, subject, messageBody, hostPageBaseURL);
verify(mockSynapse).uploadToFileHandle(any(byte[].class), eq(SynapseClientImpl.HTML_MESSAGE_CONTENT_TYPE));
verify(mockSynapse).sendMessage(arg.capture());
MessageToUser toSendMessage = arg.getValue();
assertEquals(subject, toSendMessage.getSubject());
assertEquals(recipients, toSendMessage.getRecipients());
assertTrue(toSendMessage.getNotificationUnsubscribeEndpoint().startsWith(hostPageBaseURL));
}
@Test
public void testGetCertifiedUserPassingRecord()
throws RestServiceException, SynapseException,
JSONObjectAdapterException {
PassingRecord passingRecord = new PassingRecord();
passingRecord.setPassed(true);
passingRecord.setQuizId(1238L);
String passingRecordJson = passingRecord.writeToJSONObject(
adapterFactory.createNew()).toJSONString();
when(mockSynapse.getCertifiedUserPassingRecord(anyString()))
.thenReturn(passingRecord);
String returnedPassingRecordJson = synapseClient
.getCertifiedUserPassingRecord("123");
verify(mockSynapse).getCertifiedUserPassingRecord(anyString());
assertEquals(passingRecordJson, returnedPassingRecordJson);
}
@Test(expected = NotFoundException.class)
public void testUserNeverAttemptedCertification()
throws RestServiceException, SynapseException {
when(mockSynapse.getCertifiedUserPassingRecord(anyString())).thenThrow(
new SynapseNotFoundException("PassingRecord not found"));
synapseClient.getCertifiedUserPassingRecord("123");
}
@Test(expected = NotFoundException.class)
public void testUserFailedCertification() throws RestServiceException,
SynapseException {
PassingRecord passingRecord = new PassingRecord();
passingRecord.setPassed(false);
passingRecord.setQuizId(1238L);
when(mockSynapse.getCertifiedUserPassingRecord(anyString()))
.thenReturn(passingRecord);
synapseClient.getCertifiedUserPassingRecord("123");
}
@Test
public void testGetCertificationQuiz() throws RestServiceException,
SynapseException {
when(mockSynapse.getCertifiedUserTest()).thenReturn(new Quiz());
synapseClient.getCertificationQuiz();
verify(mockSynapse).getCertifiedUserTest();
}
@Test
public void testSubmitCertificationQuizResponse()
throws RestServiceException, SynapseException,
JSONObjectAdapterException {
PassingRecord mockPassingRecord = new PassingRecord();
when(
mockSynapse
.submitCertifiedUserTestResponse(any(QuizResponse.class)))
.thenReturn(mockPassingRecord);
QuizResponse myResponse = new QuizResponse();
myResponse.setId(837L);
synapseClient.submitCertificationQuizResponse(myResponse);
verify(mockSynapse).submitCertifiedUserTestResponse(eq(myResponse));
}
@Test
public void testMarkdownCache() throws Exception {
Cache<MarkdownCacheRequest, WikiPage> mockCache = Mockito
.mock(Cache.class);
synapseClient.setMarkdownCache(mockCache);
WikiPage page = new WikiPage();
when(mockCache.get(any(MarkdownCacheRequest.class))).thenReturn(page);
Mockito.when(
mockSynapse
.getV2WikiPage(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class)))
.thenReturn(v2Page);
WikiPage actualResult = synapseClient
.getV2WikiPageAsV1(new WikiPageKey(entity.getId(),
ObjectType.ENTITY.toString(), "12"));
assertEquals(page, actualResult);
verify(mockCache).get(any(MarkdownCacheRequest.class));
}
@Test
public void testMarkdownCacheWithVersion() throws Exception {
Cache<MarkdownCacheRequest, WikiPage> mockCache = Mockito
.mock(Cache.class);
synapseClient.setMarkdownCache(mockCache);
WikiPage page = new WikiPage();
when(mockCache.get(any(MarkdownCacheRequest.class))).thenReturn(page);
Mockito.when(
mockSynapse
.getVersionOfV2WikiPage(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
anyLong())).thenReturn(v2Page);
WikiPage actualResult = synapseClient.getVersionOfV2WikiPageAsV1(
new WikiPageKey(entity.getId(), ObjectType.ENTITY.toString(),
"12"), 5L);
assertEquals(page, actualResult);
verify(mockCache).get(any(MarkdownCacheRequest.class));
}
@Test
public void testFilterAccessRequirements() throws Exception {
List<AccessRequirement> unfilteredAccessRequirements = new ArrayList<AccessRequirement>();
List<AccessRequirement> filteredAccessRequirements;
// filter empty list should not result in failure
filteredAccessRequirements = AccessRequirementUtils
.filterAccessRequirements(unfilteredAccessRequirements,
ACCESS_TYPE.UPDATE);
assertTrue(filteredAccessRequirements.isEmpty());
unfilteredAccessRequirements
.add(createAccessRequirement(ACCESS_TYPE.DOWNLOAD));
unfilteredAccessRequirements
.add(createAccessRequirement(ACCESS_TYPE.SUBMIT));
unfilteredAccessRequirements
.add(createAccessRequirement(ACCESS_TYPE.SUBMIT));
// no requirements of type UPDATE
filteredAccessRequirements = AccessRequirementUtils
.filterAccessRequirements(unfilteredAccessRequirements,
ACCESS_TYPE.UPDATE);
assertTrue(filteredAccessRequirements.isEmpty());
// 1 download
filteredAccessRequirements = AccessRequirementUtils
.filterAccessRequirements(unfilteredAccessRequirements,
ACCESS_TYPE.DOWNLOAD);
assertEquals(1, filteredAccessRequirements.size());
// 2 submit
filteredAccessRequirements = AccessRequirementUtils
.filterAccessRequirements(unfilteredAccessRequirements,
ACCESS_TYPE.SUBMIT);
assertEquals(2, filteredAccessRequirements.size());
// finally, filter null list - result will be an empty list
filteredAccessRequirements = AccessRequirementUtils
.filterAccessRequirements(null, ACCESS_TYPE.SUBMIT);
assertNotNull(filteredAccessRequirements);
assertTrue(filteredAccessRequirements.isEmpty());
}
@Test
public void testGetEntityUnmetAccessRequirements() throws Exception {
// verify it calls getUnmetAccessRequirements when unmet is true
synapseClient.getEntityAccessRequirements(entityId, true, null);
verify(mockSynapse)
.getUnmetAccessRequirements(
any(RestrictableObjectDescriptor.class),
any(ACCESS_TYPE.class));
}
@Test
public void testGetAllEntityAccessRequirements() throws Exception {
// verify it calls getAccessRequirements when unmet is false
synapseClient.getEntityAccessRequirements(entityId, false, null);
verify(mockSynapse).getAccessRequirements(
any(RestrictableObjectDescriptor.class));
}
// pass through tests for email validation
@Test
public void testAdditionalEmailValidation() throws Exception {
Long userId = 992843L;
String emailAddress = "[email protected]";
String callbackUrl = "http://www.synapse.org/#!Account:";
synapseClient.additionalEmailValidation(userId.toString(),
emailAddress, callbackUrl);
verify(mockSynapse).additionalEmailValidation(eq(userId),
eq(emailAddress), eq(callbackUrl));
}
@Test
public void testAddEmail() throws Exception {
String emailAddressToken = "long synapse email token";
synapseClient.addEmail(emailAddressToken);
verify(mockSynapse).addEmail(any(AddEmailInfo.class), anyBoolean());
}
@Test
public void testGetNotificationEmail() throws Exception {
synapseClient.getNotificationEmail();
verify(mockSynapse).getNotificationEmail();
}
@Test
public void testSetNotificationEmail() throws Exception {
String emailAddress = "[email protected]";
synapseClient.setNotificationEmail(emailAddress);
verify(mockSynapse).setNotificationEmail(eq(emailAddress));
}
@Test
public void testLogErrorToRepositoryServices() throws SynapseException,
RestServiceException, JSONObjectAdapterException {
String errorMessage = "error has occurred";
String permutationStrongName="Chrome";
synapseClient.logErrorToRepositoryServices(errorMessage, null, null, null, permutationStrongName);
verify(mockSynapse).getMyProfile();
verify(mockSynapse).logError(any(LogEntry.class));
}
@Test
public void testLogErrorToRepositoryServicesTruncation()
throws SynapseException, RestServiceException,
JSONObjectAdapterException, ServletException {
String exceptionMessage = "This exception brought to you by Sage Bionetworks";
Exception e = new Exception(exceptionMessage, new IllegalArgumentException(new NullPointerException()));
ServletContext mockServletContext = Mockito.mock(ServletContext.class);
ServletConfig mockServletConfig = Mockito.mock(ServletConfig.class);
when(mockServletConfig.getServletContext()).thenReturn(mockServletContext);
synapseClient.init(mockServletConfig);
String errorMessage = "error has occurred";
String permutationStrongName="FF";
synapseClient.logErrorToRepositoryServices(errorMessage, e.getClass().getSimpleName(), e.getMessage(), e.getStackTrace(), permutationStrongName);
ArgumentCaptor<LogEntry> captor = ArgumentCaptor
.forClass(LogEntry.class);
verify(mockSynapse).logError(captor.capture());
LogEntry logEntry = captor.getValue();
assertTrue(logEntry.getLabel().length() < SynapseClientImpl.MAX_LOG_ENTRY_LABEL_SIZE + 100);
assertTrue(logEntry.getMessage().contains(errorMessage));
assertTrue(logEntry.getMessage().contains(MY_USER_PROFILE_OWNER_ID));
assertTrue(logEntry.getMessage().contains(e.getClass().getSimpleName()));
assertTrue(logEntry.getMessage().contains(exceptionMessage));
}
@Test
public void testGetMyProjects() throws Exception {
int limit = 11;
int offset = 20;
synapseClient.getMyProjects(ProjectListType.MY_PROJECTS, limit, offset,
ProjectListSortColumn.LAST_ACTIVITY, SortDirection.DESC);
verify(mockSynapse).getMyProjects(eq(ProjectListType.MY_PROJECTS),
eq(ProjectListSortColumn.LAST_ACTIVITY),
eq(SortDirection.DESC), eq(limit), eq(offset));
verify(mockSynapse).listUserProfiles(anyList());
}
@Test
public void testGetUserProjects() throws Exception {
int limit = 11;
int offset = 20;
Long userId = 133L;
String userIdString = userId.toString();
synapseClient.getUserProjects(userIdString, limit, offset,
ProjectListSortColumn.LAST_ACTIVITY, SortDirection.DESC);
verify(mockSynapse).getProjectsFromUser(eq(userId),
eq(ProjectListSortColumn.LAST_ACTIVITY),
eq(SortDirection.DESC), eq(limit), eq(offset));
verify(mockSynapse).listUserProfiles(anyList());
}
@Test
public void testGetProjectsForTeam() throws Exception {
int limit = 13;
int offset = 40;
Long teamId = 144L;
String teamIdString = teamId.toString();
synapseClient.getProjectsForTeam(teamIdString, limit, offset,
ProjectListSortColumn.LAST_ACTIVITY, SortDirection.DESC);
verify(mockSynapse).getProjectsForTeam(eq(teamId),
eq(ProjectListSortColumn.LAST_ACTIVITY),
eq(SortDirection.DESC), eq(limit), eq(offset));
verify(mockSynapse).listUserProfiles(anyList());
}
@Test
public void testSafeLongToInt() {
int inRangeInt = 500;
int after = SynapseClientImpl.safeLongToInt(inRangeInt);
assertEquals(inRangeInt, after);
}
@Test(expected = IllegalArgumentException.class)
public void testSafeLongToIntPositive() {
long testValue = Integer.MAX_VALUE;
testValue++;
SynapseClientImpl.safeLongToInt(testValue);
}
@Test(expected = IllegalArgumentException.class)
public void testSafeLongToIntNegative() {
long testValue = Integer.MIN_VALUE;
testValue--;
SynapseClientImpl.safeLongToInt(testValue);
}
@Test
public void testGetHost() throws RestServiceException {
assertEquals("mydomain.com",
synapseClient.getHost("sfTp://mydomain.com/foo/bar"));
assertEquals("mydomain.com",
synapseClient.getHost("http://mydomain.com/foo/bar"));
assertEquals("mydomain.com",
synapseClient.getHost("http://mydomain.com"));
assertEquals("mydomain.com",
synapseClient.getHost("sftp://mydomain.com:22/foo/bar"));
}
@Test(expected = IllegalArgumentException.class)
public void testGetHostNull() throws RestServiceException {
synapseClient.getHost(null);
}
@Test(expected = IllegalArgumentException.class)
public void testGetHostEmpty() throws RestServiceException {
synapseClient.getHost("");
}
@Test(expected = BadRequestException.class)
public void testGetHostBadUrl() throws RestServiceException {
synapseClient.getHost("foobar");
}
@Test
public void testGetRootWikiId() throws JSONObjectAdapterException,
SynapseException, RestServiceException {
org.sagebionetworks.repo.model.dao.WikiPageKey key = new org.sagebionetworks.repo.model.dao.WikiPageKey();
key.setOwnerObjectId("1");
key.setOwnerObjectType(ObjectType.ENTITY);
String expectedId = "123";
key.setWikiPageId(expectedId);
when(mockSynapse.getRootWikiPageKey(anyString(), any(ObjectType.class)))
.thenReturn(key);
String actualId = synapseClient.getRootWikiId("1",
ObjectType.ENTITY.toString());
assertEquals(expectedId, actualId);
}
@Test
public void testGetFavorites() throws JSONObjectAdapterException,
SynapseException, RestServiceException {
PaginatedResults<EntityHeader> pagedResults = new PaginatedResults<EntityHeader>();
List<EntityHeader> unsortedResults = new ArrayList<EntityHeader>();
pagedResults.setResults(unsortedResults);
when(mockSynapse.getFavorites(anyInt(), anyInt())).thenReturn(
pagedResults);
// test empty favorites
List<EntityHeader> actualList = synapseClient.getFavorites();
assertTrue(actualList.isEmpty());
// test a few unsorted favorites
EntityHeader favZ = new EntityHeader();
favZ.setName("Z");
unsortedResults.add(favZ);
EntityHeader favA = new EntityHeader();
favA.setName("A");
unsortedResults.add(favA);
EntityHeader favQ = new EntityHeader();
favQ.setName("q");
unsortedResults.add(favQ);
actualList = synapseClient.getFavorites();
assertEquals(3, actualList.size());
assertEquals(favA, actualList.get(0));
assertEquals(favQ, actualList.get(1));
assertEquals(favZ, actualList.get(2));
}
@Test
public void testGetTeamBundlesNotOwner() throws RestServiceException, SynapseException {
// the paginated results were set up to return {teamZ, teamA}, but
// servlet side we sort by name.
List<TeamRequestBundle> results = synapseClient.getTeamsForUser("abba", false);
verify(mockSynapse).getTeamsForUser(eq("abba"), anyInt(), anyInt());
assertEquals(2, results.size());
assertEquals(teamA, results.get(0).getTeam());
assertEquals(teamZ, results.get(1).getTeam());
verify(mockSynapse, Mockito.never()).getOpenMembershipRequests(anyString(), anyString(),
anyLong(), anyLong());
}
@Test
public void testGetTeamBundlesOwner() throws RestServiceException, SynapseException {
TeamMember testTeamMember = new TeamMember();
testTeamMember.setIsAdmin(true);
when(mockSynapse.getTeamMember(anyString(), anyString())).thenReturn(
testTeamMember);
when(mockSynapse.getOpenMembershipRequests(anyString(), anyString(), anyLong(), anyLong())).thenReturn(mockPaginatedMembershipRequest);
List<TeamRequestBundle> results = synapseClient.getTeamsForUser("abba", true);
verify(mockSynapse).getTeamsForUser(eq("abba"), anyInt(), anyInt());
assertEquals(2, results.size());
assertEquals(teamA, results.get(0).getTeam());
assertEquals(teamZ, results.get(1).getTeam());
Long reqCount1 = results.get(0).getRequestCount();
Long reqCount2 = results.get(1).getRequestCount();
assertEquals(new Long(3L), reqCount1);
assertEquals(new Long(3L), reqCount2);
}
@Test
public void testGetEntityInfo() throws RestServiceException,
JSONObjectAdapterException, SynapseException{
EntityBundlePlus entityBundlePlus = synapseClient.getEntityInfo(entityId);
assertEquals(entity, entityBundlePlus.getEntityBundle().getEntity());
assertEquals(annos, entityBundlePlus.getEntityBundle().getAnnotations());
assertEquals(testUserProfile, entityBundlePlus.getProfile());
}
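// Signed token tests: hexDecodeAndSerialize should reject a null, empty, or
// unrecognized token type name, and should decode the tokens encoded in setup.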
@Test(expected = BadRequestException.class)
public void testHandleSignedTokenNull() throws RestServiceException, SynapseException{
String tokenTypeName = null;
synapseClient.hexDecodeAndSerialize(tokenTypeName, encodedJoinTeamToken);
}
@Test(expected = BadRequestException.class)
public void testHandleSignedTokenEmpty() throws RestServiceException, SynapseException{
String tokenTypeName = "";
synapseClient.hexDecodeAndSerialize(tokenTypeName, encodedJoinTeamToken);
}
@Test(expected = BadRequestException.class)
public void testHandleSignedTokenUnrecognized() throws RestServiceException, SynapseException{
String tokenTypeName = "InvalidTokenType";
synapseClient.hexDecodeAndSerialize(tokenTypeName, encodedJoinTeamToken);
}
@Test
public void testHandleSignedTokenJoinTeam() throws RestServiceException, SynapseException{
String tokenTypeName = NotificationTokenType.JoinTeam.name();
SignedTokenInterface token = synapseClient.hexDecodeAndSerialize(tokenTypeName, encodedJoinTeamToken);
synapseClient.handleSignedToken(token,TEST_HOME_PAGE_BASE);
verify(mockSynapse).addTeamMember(joinTeamToken, TEST_HOME_PAGE_BASE+"#!Team:", TEST_HOME_PAGE_BASE+"#!SignedToken:Settings/");
}
@Test(expected = BadRequestException.class)
public void testHandleSignedTokenInvalidJoinTeam() throws RestServiceException, SynapseException{
String tokenTypeName = NotificationTokenType.JoinTeam.name();
SignedTokenInterface token = synapseClient.hexDecodeAndSerialize(tokenTypeName, "invalid token");
}
@Test(expected = BadRequestException.class)
public void testHandleSignedTokenJoinTeamWrongToken() throws RestServiceException, SynapseException{
String tokenTypeName = NotificationTokenType.JoinTeam.name();
synapseClient.hexDecodeAndSerialize(tokenTypeName, encodedNotificationSettingsToken);
}
@Test
public void testHandleSignedTokenNotificationSettings() throws RestServiceException, SynapseException{
String tokenTypeName = NotificationTokenType.Settings.name();
SignedTokenInterface token = synapseClient.hexDecodeAndSerialize(tokenTypeName, encodedNotificationSettingsToken);
synapseClient.handleSignedToken(token, TEST_HOME_PAGE_BASE);
verify(mockSynapse).updateNotificationSettings(notificationSettingsToken);
}
@Test(expected = BadRequestException.class)
public void testHandleSignedTokenInvalidNotificationSettings() throws RestServiceException, SynapseException{
String tokenTypeName = NotificationTokenType.Settings.name();
SignedTokenInterface token = synapseClient.hexDecodeAndSerialize(tokenTypeName, "invalid token");
}
@Test(expected = BadRequestException.class)
public void testHandleSignedTokenNotificationSettingsWrongToken() throws RestServiceException, SynapseException{
String tokenTypeName = NotificationTokenType.Settings.name();
synapseClient.hexDecodeAndSerialize(tokenTypeName, encodedJoinTeamToken);
}
@Test
public void testGetOrCreateActivityForEntityVersionGet() throws SynapseException, RestServiceException {
when(mockSynapse.getActivityForEntityVersion(anyString(), anyLong())).thenReturn(new Activity());
synapseClient.getOrCreateActivityForEntityVersion(entityId, version);
verify(mockSynapse).getActivityForEntityVersion(entityId, version);
}
@Test
public void testGetOrCreateActivityForEntityVersionCreate() throws SynapseException, RestServiceException {
when(mockSynapse.getActivityForEntityVersion(anyString(), anyLong())).thenThrow(new SynapseNotFoundException());
when(mockSynapse.createActivity(any(Activity.class))).thenReturn(mockActivity);
synapseClient.getOrCreateActivityForEntityVersion(entityId, version);
verify(mockSynapse).getActivityForEntityVersion(entityId, version);
verify(mockSynapse).createActivity(any(Activity.class));
verify(mockSynapse).putEntity(mockSynapse.getEntityById(entityId), mockActivity.getId());
}
@Test(expected = Exception.class)
public void testGetOrCreateActivityForEntityVersionFailure() throws SynapseException, RestServiceException {
when(mockSynapse.getActivityForEntityVersion(anyString(), anyLong())).thenThrow(new Exception());
synapseClient.getOrCreateActivityForEntityVersion(entityId, version);
}
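// Helper: stubs getMyStorageLocationSettings with three locations, two of
// which share the banner "Banner 1" (so unique-banner queries return two).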
private void setupGetMyLocationSettings() throws SynapseException, RestServiceException{
List<StorageLocationSetting> existingStorageLocations = new ArrayList<StorageLocationSetting>();
StorageLocationSetting storageLocation = new ExternalS3StorageLocationSetting();
storageLocation.setStorageLocationId(1L);
storageLocation.setBanner("Banner 1");
existingStorageLocations.add(storageLocation);
storageLocation = new ExternalStorageLocationSetting();
storageLocation.setStorageLocationId(2L);
storageLocation.setBanner("Another Banner");
((ExternalStorageLocationSetting)storageLocation).setUrl("sftp://www.jayhodgson.com");
existingStorageLocations.add(storageLocation);
storageLocation = new ExternalStorageLocationSetting();
storageLocation.setStorageLocationId(3L);
storageLocation.setBanner("Banner 1");
existingStorageLocations.add(storageLocation);
when(mockSynapse.getMyStorageLocationSettings()).thenReturn(existingStorageLocations);
}
@Test
public void testGetMyLocationSettingBanners() throws SynapseException, RestServiceException {
setupGetMyLocationSettings();
List<String> banners = synapseClient.getMyLocationSettingBanners();
verify(mockSynapse).getMyStorageLocationSettings();
//should be 2 (only returns unique values)
assertEquals(2, banners.size());
//and alphabetically sorted
assertEquals(Arrays.asList("Another Banner", "Banner 1"), banners);
}
@Test(expected = Exception.class)
public void testGetMyLocationSettingBannersFailure() throws SynapseException, RestServiceException {
when(mockSynapse.getMyStorageLocationSettings()).thenThrow(new Exception());
synapseClient.getMyLocationSettingBanners();
}
@Test
public void testGetStorageLocationSettingNullSetting() throws SynapseException, RestServiceException {
when(mockSynapse.getProjectSetting(entityId, ProjectSettingsType.upload)).thenReturn(null);
assertNull(synapseClient.getStorageLocationSetting(entityId));
}
@Test
public void testGetStorageLocationSettingEmptyLocations() throws SynapseException, RestServiceException {
UploadDestinationListSetting setting = new UploadDestinationListSetting();
setting.setLocations(Collections.EMPTY_LIST);
when(mockSynapse.getProjectSetting(entityId, ProjectSettingsType.upload)).thenReturn(setting);
assertNull(synapseClient.getStorageLocationSetting(entityId));
}
@Test
public void testGetStorageLocationSetting() throws SynapseException, RestServiceException {
UploadDestinationListSetting setting = new UploadDestinationListSetting();
setting.setLocations(Collections.singletonList(42L));
when(mockSynapse.getProjectSetting(entityId, ProjectSettingsType.upload)).thenReturn(setting);
StorageLocationSetting mockStorageLocationSetting = Mockito.mock(StorageLocationSetting.class);
when(mockSynapse.getMyStorageLocationSetting(anyLong())).thenReturn(mockStorageLocationSetting);
assertEquals(mockStorageLocationSetting, synapseClient.getStorageLocationSetting(entityId));
}
@Test(expected = Exception.class)
public void testGetStorageLocationSettingFailure() throws SynapseException, RestServiceException {
when(mockSynapse.getMyStorageLocationSetting(anyLong())).thenThrow(new Exception());
synapseClient.getStorageLocationSetting(entityId);
}
@Test
public void testCreateStorageLocationSettingFoundStorageAndProjectSetting() throws SynapseException, RestServiceException {
setupGetMyLocationSettings();
UploadDestinationListSetting projectSetting = new UploadDestinationListSetting();
projectSetting.setLocations(Collections.<Long> emptyList());
when(mockSynapse.getProjectSetting(entityId, ProjectSettingsType.upload)).thenReturn(projectSetting);
//test the case when it finds a duplicate storage location.
ExternalStorageLocationSetting setting = new ExternalStorageLocationSetting();
setting.setBanner("Another Banner");
setting.setUrl("sftp://www.jayhodgson.com");
synapseClient.createStorageLocationSetting(entityId, setting);
//should have found the duplicate storage location, so this is never called
verify(mockSynapse, Mockito.never()).createStorageLocationSetting(any(StorageLocationSetting.class));
//verify updates project setting, and the new location list is a single value (id of existing storage location)
ArgumentCaptor<ProjectSetting> captor = ArgumentCaptor.forClass(ProjectSetting.class);
verify(mockSynapse).updateProjectSetting(captor.capture());
UploadDestinationListSetting updatedProjectSetting = (UploadDestinationListSetting)captor.getValue();
List<Long> locations = updatedProjectSetting.getLocations();
assertEquals(new Long(2), locations.get(0));
}
@Test
public void testCreateStorageLocationSettingNewStorageAndProjectSetting() throws SynapseException, RestServiceException {
setupGetMyLocationSettings();
when(mockSynapse.getProjectSetting(entityId, ProjectSettingsType.upload)).thenReturn(null);
//test the case when it does not find a duplicate storage location setting.
ExternalStorageLocationSetting setting = new ExternalStorageLocationSetting();
setting.setBanner("Another Banner");
setting.setUrl("sftp://www.google.com");
Long newStorageLocationId = 1007L;
ExternalStorageLocationSetting createdSetting = new ExternalStorageLocationSetting();
createdSetting.setStorageLocationId(newStorageLocationId);
when(mockSynapse.createStorageLocationSetting(any(StorageLocationSetting.class))).thenReturn(createdSetting);
synapseClient.createStorageLocationSetting(entityId, setting);
//should not have found a duplicate storage location, so this should be called
verify(mockSynapse).createStorageLocationSetting(any(StorageLocationSetting.class));
//verify creates new project setting, and the new location list is a single value (id of the new storage location)
ArgumentCaptor<ProjectSetting> captor = ArgumentCaptor.forClass(ProjectSetting.class);
verify(mockSynapse).createProjectSetting(captor.capture());
UploadDestinationListSetting updatedProjectSetting = (UploadDestinationListSetting)captor.getValue();
List<Long> locations = updatedProjectSetting.getLocations();
assertEquals(newStorageLocationId, locations.get(0));
assertEquals(ProjectSettingsType.upload, updatedProjectSetting.getSettingsType());
assertEquals(entityId, updatedProjectSetting.getProjectId());
}
}
| src/test/java/org/sagebionetworks/web/unitserver/SynapseClientImplTest.java | package org.sagebionetworks.web.unitserver;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyList;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.sagebionetworks.repo.model.EntityBundle.ACCESS_REQUIREMENTS;
import static org.sagebionetworks.repo.model.EntityBundle.ANNOTATIONS;
import static org.sagebionetworks.repo.model.EntityBundle.ENTITY;
import static org.sagebionetworks.repo.model.EntityBundle.ENTITY_PATH;
import static org.sagebionetworks.repo.model.EntityBundle.HAS_CHILDREN;
import static org.sagebionetworks.repo.model.EntityBundle.PERMISSIONS;
import static org.sagebionetworks.repo.model.EntityBundle.ROOT_WIKI_ID;
import static org.sagebionetworks.repo.model.EntityBundle.UNMET_ACCESS_REQUIREMENTS;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Matchers;
import org.mockito.Mockito;
import org.sagebionetworks.client.SynapseClient;
import org.sagebionetworks.client.exceptions.SynapseException;
import org.sagebionetworks.client.exceptions.SynapseNotFoundException;
import org.sagebionetworks.evaluation.model.Evaluation;
import org.sagebionetworks.evaluation.model.EvaluationStatus;
import org.sagebionetworks.evaluation.model.Participant;
import org.sagebionetworks.evaluation.model.UserEvaluationPermissions;
import org.sagebionetworks.reflection.model.PaginatedResults;
import org.sagebionetworks.repo.model.ACCESS_TYPE;
import org.sagebionetworks.repo.model.AccessControlList;
import org.sagebionetworks.repo.model.AccessRequirement;
import org.sagebionetworks.repo.model.Annotations;
import org.sagebionetworks.repo.model.EntityBundle;
import org.sagebionetworks.repo.model.EntityHeader;
import org.sagebionetworks.repo.model.EntityIdList;
import org.sagebionetworks.repo.model.EntityPath;
import org.sagebionetworks.repo.model.ExampleEntity;
import org.sagebionetworks.repo.model.FileEntity;
import org.sagebionetworks.repo.model.Folder;
import org.sagebionetworks.repo.model.JoinTeamSignedToken;
import org.sagebionetworks.repo.model.LogEntry;
import org.sagebionetworks.repo.model.MembershipInvitation;
import org.sagebionetworks.repo.model.MembershipInvtnSubmission;
import org.sagebionetworks.repo.model.MembershipRequest;
import org.sagebionetworks.repo.model.MembershipRqstSubmission;
import org.sagebionetworks.repo.model.ObjectType;
import org.sagebionetworks.repo.model.Project;
import org.sagebionetworks.repo.model.ProjectHeader;
import org.sagebionetworks.repo.model.ProjectListSortColumn;
import org.sagebionetworks.repo.model.ProjectListType;
import org.sagebionetworks.repo.model.ResourceAccess;
import org.sagebionetworks.repo.model.RestrictableObjectDescriptor;
import org.sagebionetworks.repo.model.RestrictableObjectType;
import org.sagebionetworks.repo.model.SignedTokenInterface;
import org.sagebionetworks.repo.model.Team;
import org.sagebionetworks.repo.model.TeamMember;
import org.sagebionetworks.repo.model.TeamMembershipStatus;
import org.sagebionetworks.repo.model.TermsOfUseAccessRequirement;
import org.sagebionetworks.repo.model.UserGroup;
import org.sagebionetworks.repo.model.UserGroupHeader;
import org.sagebionetworks.repo.model.UserProfile;
import org.sagebionetworks.repo.model.UserSessionData;
import org.sagebionetworks.repo.model.auth.UserEntityPermissions;
import org.sagebionetworks.repo.model.doi.Doi;
import org.sagebionetworks.repo.model.doi.DoiStatus;
import org.sagebionetworks.repo.model.entity.query.SortDirection;
import org.sagebionetworks.repo.model.file.ChunkRequest;
import org.sagebionetworks.repo.model.file.ChunkedFileToken;
import org.sagebionetworks.repo.model.file.CompleteAllChunksRequest;
import org.sagebionetworks.repo.model.file.CompleteChunkedFileRequest;
import org.sagebionetworks.repo.model.file.CreateChunkedFileTokenRequest;
import org.sagebionetworks.repo.model.file.ExternalFileHandle;
import org.sagebionetworks.repo.model.file.FileHandleResults;
import org.sagebionetworks.repo.model.file.S3FileHandle;
import org.sagebionetworks.repo.model.file.State;
import org.sagebionetworks.repo.model.file.UploadDaemonStatus;
import org.sagebionetworks.repo.model.message.MessageToUser;
import org.sagebionetworks.repo.model.message.NotificationSettingsSignedToken;
import org.sagebionetworks.repo.model.message.Settings;
import org.sagebionetworks.repo.model.principal.AddEmailInfo;
import org.sagebionetworks.repo.model.project.ExternalS3StorageLocationSetting;
import org.sagebionetworks.repo.model.project.ExternalStorageLocationSetting;
import org.sagebionetworks.repo.model.project.ProjectSettingsType;
import org.sagebionetworks.repo.model.project.StorageLocationSetting;
import org.sagebionetworks.repo.model.project.UploadDestinationListSetting;
import org.sagebionetworks.repo.model.provenance.Activity;
import org.sagebionetworks.repo.model.quiz.PassingRecord;
import org.sagebionetworks.repo.model.quiz.Quiz;
import org.sagebionetworks.repo.model.quiz.QuizResponse;
import org.sagebionetworks.repo.model.v2.wiki.V2WikiHeader;
import org.sagebionetworks.repo.model.v2.wiki.V2WikiHistorySnapshot;
import org.sagebionetworks.repo.model.v2.wiki.V2WikiOrderHint;
import org.sagebionetworks.repo.model.v2.wiki.V2WikiPage;
import org.sagebionetworks.repo.model.wiki.WikiHeader;
import org.sagebionetworks.repo.model.wiki.WikiPage;
import org.sagebionetworks.schema.adapter.AdapterFactory;
import org.sagebionetworks.schema.adapter.JSONObjectAdapter;
import org.sagebionetworks.schema.adapter.JSONObjectAdapterException;
import org.sagebionetworks.schema.adapter.org.json.AdapterFactoryImpl;
import org.sagebionetworks.schema.adapter.org.json.EntityFactory;
import org.sagebionetworks.schema.adapter.org.json.JSONObjectAdapterImpl;
import org.sagebionetworks.util.SerializationUtils;
import org.sagebionetworks.web.client.view.TeamRequestBundle;
import org.sagebionetworks.web.server.servlet.MarkdownCacheRequest;
import org.sagebionetworks.web.server.servlet.NotificationTokenType;
import org.sagebionetworks.web.server.servlet.ServiceUrlProvider;
import org.sagebionetworks.web.server.servlet.SynapseClientImpl;
import org.sagebionetworks.web.server.servlet.SynapseProvider;
import org.sagebionetworks.web.server.servlet.TokenProvider;
import org.sagebionetworks.web.shared.AccessRequirementUtils;
import org.sagebionetworks.web.shared.EntityBundlePlus;
import org.sagebionetworks.web.shared.OpenTeamInvitationBundle;
import org.sagebionetworks.web.shared.ProjectPagedResults;
import org.sagebionetworks.web.shared.TeamBundle;
import org.sagebionetworks.web.shared.TeamMemberBundle;
import org.sagebionetworks.web.shared.TeamMemberPagedResults;
import org.sagebionetworks.web.shared.WikiPageKey;
import org.sagebionetworks.web.shared.exceptions.BadRequestException;
import org.sagebionetworks.web.shared.exceptions.ConflictException;
import org.sagebionetworks.web.shared.exceptions.NotFoundException;
import org.sagebionetworks.web.shared.exceptions.RestServiceException;
import org.sagebionetworks.web.shared.users.AclUtils;
import org.sagebionetworks.web.shared.users.PermissionLevel;
import com.google.common.cache.Cache;
/**
* Test for the SynapseClientImpl
*
* @author John
*
*/
public class SynapseClientImplTest {
public static final String TEST_HOME_PAGE_BASE = "http://mysynapse.org/";
public static final String MY_USER_PROFILE_OWNER_ID = "MyOwnerID";
SynapseProvider mockSynapseProvider;
TokenProvider mockTokenProvider;
ServiceUrlProvider mockUrlProvider;
SynapseClient mockSynapse;
SynapseClientImpl synapseClient;
String entityId = "123";
String inviteeUserId = "900";
UserProfile inviteeUserProfile;
ExampleEntity entity;
Annotations annos;
UserEntityPermissions eup;
UserEvaluationPermissions userEvaluationPermissions;
List<EntityHeader> batchHeaderResults;
String testFileName = "testFileEntity.R";
EntityPath path;
org.sagebionetworks.reflection.model.PaginatedResults<UserGroup> pgugs;
org.sagebionetworks.reflection.model.PaginatedResults<UserProfile> pgups;
org.sagebionetworks.reflection.model.PaginatedResults<Team> pguts;
Team teamA, teamZ;
AccessControlList acl;
WikiPage page;
V2WikiPage v2Page;
S3FileHandle handle;
Evaluation mockEvaluation;
Participant mockParticipant;
UserSessionData mockUserSessionData;
UserProfile mockUserProfile;
MembershipInvtnSubmission testInvitation;
PaginatedResults mockPaginatedMembershipRequest;
Activity mockActivity;
MessageToUser sentMessage;
Long storageLocationId = 9090L;
UserProfile testUserProfile;
Long version = 1L;
//Token testing
NotificationSettingsSignedToken notificationSettingsToken;
JoinTeamSignedToken joinTeamToken;
String encodedJoinTeamToken, encodedNotificationSettingsToken;
private static final String testUserId = "myUserId";
private static final String EVAL_ID_1 = "eval ID 1";
private static final String EVAL_ID_2 = "eval ID 2";
private static JSONObjectAdapter jsonObjectAdapter = new JSONObjectAdapterImpl();
private static AdapterFactory adapterFactory = new AdapterFactoryImpl();
private TeamMembershipStatus membershipStatus;
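/**
 * Wires a fully mocked SynapseClient into the SynapseClientImpl under test and
 * stubs the calls that most tests rely on (entity bundles, ACLs, teams, file
 * handles, project listings, and signed tokens).
 */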
@Before
public void before() throws SynapseException, JSONObjectAdapterException {
mockSynapse = Mockito.mock(SynapseClient.class);
mockSynapseProvider = Mockito.mock(SynapseProvider.class);
mockUrlProvider = Mockito.mock(ServiceUrlProvider.class);
when(mockSynapseProvider.createNewClient()).thenReturn(mockSynapse);
mockTokenProvider = Mockito.mock(TokenProvider.class);
mockPaginatedMembershipRequest = Mockito.mock(PaginatedResults.class);
mockActivity = Mockito.mock(Activity.class);
when(mockPaginatedMembershipRequest.getTotalNumberOfResults()).thenReturn(3L);
synapseClient = new SynapseClientImpl();
synapseClient.setSynapseProvider(mockSynapseProvider);
synapseClient.setTokenProvider(mockTokenProvider);
synapseClient.setServiceUrlProvider(mockUrlProvider);
// Set up the entity
entity = new ExampleEntity();
entity.setId(entityId);
entity.setEntityType(ExampleEntity.class.getName());
entity.setModifiedBy(testUserId);
// the mock synapse should return this object
when(mockSynapse.getEntityById(entityId)).thenReturn(entity);
// Setup the annotations
annos = new Annotations();
annos.setId(entityId);
annos.addAnnotation("string", "a string value");
// the mock synapse should return this object
when(mockSynapse.getAnnotations(entityId)).thenReturn(annos);
// Setup the Permissions
eup = new UserEntityPermissions();
eup.setCanDelete(true);
eup.setCanView(false);
eup.setOwnerPrincipalId(999L);
// the mock synapse should return this object
when(mockSynapse.getUsersEntityPermissions(entityId)).thenReturn(eup);
// user can change permissions on eval 2, but not on 1
userEvaluationPermissions = new UserEvaluationPermissions();
userEvaluationPermissions.setCanChangePermissions(false);
when(mockSynapse.getUserEvaluationPermissions(EVAL_ID_1)).thenReturn(
userEvaluationPermissions);
userEvaluationPermissions = new UserEvaluationPermissions();
userEvaluationPermissions.setCanChangePermissions(true);
when(mockSynapse.getUserEvaluationPermissions(EVAL_ID_2)).thenReturn(
userEvaluationPermissions);
// Setup the path
path = new EntityPath();
path.setPath(new ArrayList<EntityHeader>());
EntityHeader header = new EntityHeader();
header.setId(entityId);
header.setName("RomperRuuuu");
path.getPath().add(header);
// the mock synapse should return this object
when(mockSynapse.getEntityPath(entityId)).thenReturn(path);
pgugs = new org.sagebionetworks.reflection.model.PaginatedResults<UserGroup>();
List<UserGroup> ugs = new ArrayList<UserGroup>();
ugs.add(new UserGroup());
pgugs.setResults(ugs);
when(mockSynapse.getGroups(anyInt(), anyInt())).thenReturn(pgugs);
pgups = new org.sagebionetworks.reflection.model.PaginatedResults<UserProfile>();
List<UserProfile> ups = new ArrayList<UserProfile>();
ups.add(new UserProfile());
pgups.setResults(ups);
when(mockSynapse.getUsers(anyInt(), anyInt())).thenReturn(pgups);
pguts = new org.sagebionetworks.reflection.model.PaginatedResults<Team>();
List<Team> uts = new ArrayList<Team>();
teamZ = new Team();
teamZ.setId("1");
teamZ.setName("zygote");
uts.add(teamZ);
teamA = new Team();
teamA.setId("2");
teamA.setName("Amplitude");
uts.add(teamA);
pguts.setResults(uts);
when(mockSynapse.getTeamsForUser(anyString(), anyInt(), anyInt()))
.thenReturn(pguts);
acl = new AccessControlList();
acl.setId("sys999");
Set<ResourceAccess> ras = new HashSet<ResourceAccess>();
ResourceAccess ra = new ResourceAccess();
ra.setPrincipalId(101L);
ra.setAccessType(AclUtils
		.getACCESS_TYPEs(PermissionLevel.CAN_ADMINISTER));
// add the access entry to the set before attaching it to the ACL
ras.add(ra);
acl.setResourceAccess(ras);
when(mockSynapse.getACL(anyString())).thenReturn(acl);
when(mockSynapse.createACL((AccessControlList) any())).thenReturn(acl);
when(mockSynapse.updateACL((AccessControlList) any())).thenReturn(acl);
when(mockSynapse.updateACL((AccessControlList) any(), eq(true)))
.thenReturn(acl);
when(mockSynapse.updateACL((AccessControlList) any(), eq(false)))
.thenReturn(acl);
EntityHeader bene = new EntityHeader();
bene.setId("syn999");
when(mockSynapse.getEntityBenefactor(anyString())).thenReturn(bene);
org.sagebionetworks.reflection.model.PaginatedResults<EntityHeader> batchHeaders = new org.sagebionetworks.reflection.model.PaginatedResults<EntityHeader>();
batchHeaderResults = new ArrayList<EntityHeader>();
for (int i = 0; i < 10; i++) {
EntityHeader h = new EntityHeader();
h.setId("syn" + i);
batchHeaderResults.add(h);
}
batchHeaders.setResults(batchHeaderResults);
when(mockSynapse.getEntityHeaderBatch(anyList())).thenReturn(
batchHeaders);
List<AccessRequirement> accessRequirements = new ArrayList<AccessRequirement>();
accessRequirements.add(createAccessRequirement(ACCESS_TYPE.DOWNLOAD));
int mask = ENTITY | ANNOTATIONS | PERMISSIONS | ENTITY_PATH
| HAS_CHILDREN | ACCESS_REQUIREMENTS
| UNMET_ACCESS_REQUIREMENTS;
int emptyMask = 0;
EntityBundle bundle = new EntityBundle();
bundle.setEntity(entity);
bundle.setAnnotations(annos);
bundle.setPermissions(eup);
bundle.setPath(path);
bundle.setHasChildren(false);
bundle.setAccessRequirements(accessRequirements);
bundle.setUnmetAccessRequirements(accessRequirements);
when(mockSynapse.getEntityBundle(anyString(), Matchers.eq(mask)))
.thenReturn(bundle);
when(mockSynapse.getEntityBundle(anyString(), Matchers.eq(ENTITY | ANNOTATIONS | ROOT_WIKI_ID)))
.thenReturn(bundle);
EntityBundle emptyBundle = new EntityBundle();
when(mockSynapse.getEntityBundle(anyString(), Matchers.eq(emptyMask)))
.thenReturn(emptyBundle);
when(mockSynapse.canAccess("syn101", ACCESS_TYPE.READ))
.thenReturn(true);
page = new WikiPage();
page.setId("testId");
page.setMarkdown("my markdown");
page.setParentWikiId(null);
page.setTitle("A Title");
v2Page = new V2WikiPage();
v2Page.setId("v2TestId");
v2Page.setEtag("122333");
handle = new S3FileHandle();
handle.setId("4422");
handle.setBucketName("bucket");
handle.setFileName(testFileName);
handle.setKey("key");
when(mockSynapse.getRawFileHandle(anyString())).thenReturn(handle);
when(
mockSynapse
.completeChunkFileUpload(any(CompleteChunkedFileRequest.class)))
.thenReturn(handle);
org.sagebionetworks.reflection.model.PaginatedResults<AccessRequirement> ars = new org.sagebionetworks.reflection.model.PaginatedResults<AccessRequirement>();
ars.setTotalNumberOfResults(0);
ars.setResults(new ArrayList<AccessRequirement>());
when(
mockSynapse
.getAccessRequirements(any(RestrictableObjectDescriptor.class)))
.thenReturn(ars);
when(
mockSynapse.getUnmetAccessRequirements(
any(RestrictableObjectDescriptor.class),
any(ACCESS_TYPE.class))).thenReturn(ars);
mockEvaluation = Mockito.mock(Evaluation.class);
when(mockEvaluation.getStatus()).thenReturn(EvaluationStatus.OPEN);
when(mockSynapse.getEvaluation(anyString())).thenReturn(mockEvaluation);
mockUserSessionData = Mockito.mock(UserSessionData.class);
mockUserProfile = Mockito.mock(UserProfile.class);
when(mockSynapse.getUserSessionData()).thenReturn(mockUserSessionData);
when(mockUserSessionData.getProfile()).thenReturn(mockUserProfile);
when(mockUserProfile.getOwnerId()).thenReturn(MY_USER_PROFILE_OWNER_ID);
mockParticipant = Mockito.mock(Participant.class);
when(mockSynapse.getParticipant(anyString(), anyString())).thenReturn(
mockParticipant);
when(mockSynapse.getMyProfile()).thenReturn(mockUserProfile);
when(mockSynapse.createParticipant(anyString())).thenReturn(
mockParticipant);
UploadDaemonStatus status = new UploadDaemonStatus();
String fileHandleId = "myFileHandleId";
status.setFileHandleId(fileHandleId);
status.setState(State.COMPLETED);
when(mockSynapse.getCompleteUploadDaemonStatus(anyString()))
.thenReturn(status);
status = new UploadDaemonStatus();
status.setState(State.PROCESSING);
status.setPercentComplete(.05d);
when(mockSynapse.startUploadDeamon(any(CompleteAllChunksRequest.class)))
.thenReturn(status);
PaginatedResults<MembershipInvitation> openInvites = new PaginatedResults<MembershipInvitation>();
openInvites.setTotalNumberOfResults(0);
when(
mockSynapse.getOpenMembershipInvitations(anyString(),
anyString(), anyLong(), anyLong())).thenReturn(
openInvites);
PaginatedResults<MembershipRequest> openRequests = new PaginatedResults<MembershipRequest>();
openRequests.setTotalNumberOfResults(0);
when(
mockSynapse.getOpenMembershipRequests(anyString(), anyString(),
anyLong(), anyLong())).thenReturn(openRequests);
membershipStatus = new TeamMembershipStatus();
membershipStatus.setCanJoin(false);
membershipStatus.setHasOpenInvitation(false);
membershipStatus.setHasOpenRequest(false);
membershipStatus.setHasUnmetAccessRequirement(false);
membershipStatus.setIsMember(false);
membershipStatus.setMembershipApprovalRequired(false);
when(mockSynapse.getTeamMembershipStatus(anyString(), anyString()))
.thenReturn(membershipStatus);
sentMessage = new MessageToUser();
sentMessage.setId("987");
when(
mockSynapse.sendMessage(any(MessageToUser.class))).thenReturn(sentMessage);
// stubs for getMyProjects / getProjectsFromUser / getProjectsForTeam
PaginatedResults<ProjectHeader> headers = new PaginatedResults<ProjectHeader>();
headers.setTotalNumberOfResults(1100);
List<ProjectHeader> projectHeaders = new ArrayList<ProjectHeader>();
projectHeaders.add(new ProjectHeader());
headers.setResults(projectHeaders);
when(
mockSynapse.getMyProjects(any(ProjectListType.class),
any(ProjectListSortColumn.class),
any(SortDirection.class), anyInt(), anyInt()))
.thenReturn(headers);
when(
mockSynapse.getProjectsFromUser(anyLong(),
any(ProjectListSortColumn.class),
any(SortDirection.class), anyInt(), anyInt()))
.thenReturn(headers);
when(
mockSynapse.getProjectsForTeam(anyLong(),
any(ProjectListSortColumn.class),
any(SortDirection.class), anyInt(), anyInt()))
.thenReturn(headers);
testUserProfile = new UserProfile();
testUserProfile.setUserName("Test User");
when(mockSynapse.getUserProfile(eq(testUserId))).thenReturn(
testUserProfile);
joinTeamToken = new JoinTeamSignedToken();
joinTeamToken.setHmac("98765");
joinTeamToken.setMemberId("1");
joinTeamToken.setTeamId("2");
joinTeamToken.setUserId("3");
encodedJoinTeamToken = SerializationUtils.serializeAndHexEncode(joinTeamToken);
notificationSettingsToken = new NotificationSettingsSignedToken();
notificationSettingsToken.setHmac("987654");
notificationSettingsToken.setSettings(new Settings());
notificationSettingsToken.setUserId("4");
encodedNotificationSettingsToken = SerializationUtils.serializeAndHexEncode(notificationSettingsToken);
}
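/**
 * Builds a minimal terms-of-use access requirement of the given type,
 * scoped to entity "101", for the entity-bundle stubs above.
 */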
private AccessRequirement createAccessRequirement(ACCESS_TYPE type) {
TermsOfUseAccessRequirement accessRequirement = new TermsOfUseAccessRequirement();
accessRequirement.setConcreteType(TermsOfUseAccessRequirement.class
.getName());
RestrictableObjectDescriptor descriptor = new RestrictableObjectDescriptor();
descriptor.setId("101");
descriptor.setType(RestrictableObjectType.ENTITY);
accessRequirement.setSubjectIds(Arrays
.asList(new RestrictableObjectDescriptor[] { descriptor }));
accessRequirement.setAccessType(type);
return accessRequirement;
}
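/**
 * Stubs a single open membership invitation (id "628319") for the invitee
 * user, plus the invitee's user profile, for the open-invitation tests.
 */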
private void setupTeamInvitations() throws SynapseException {
ArrayList<MembershipInvtnSubmission> testInvitations = new ArrayList<MembershipInvtnSubmission>();
testInvitation = new MembershipInvtnSubmission();
testInvitation.setId("628319");
testInvitation.setInviteeId(inviteeUserId);
testInvitations.add(testInvitation);
PaginatedResults<MembershipInvtnSubmission> paginatedInvitations = new PaginatedResults<MembershipInvtnSubmission>();
paginatedInvitations.setResults(testInvitations);
when(
mockSynapse.getOpenMembershipInvitationSubmissions(anyString(),
anyString(), anyLong(), anyLong())).thenReturn(
paginatedInvitations);
inviteeUserProfile = new UserProfile();
inviteeUserProfile.setUserName("Invitee User");
inviteeUserProfile.setOwnerId(inviteeUserId);
when(mockSynapse.getUserProfile(eq(inviteeUserId))).thenReturn(
inviteeUserProfile);
}
@Test
public void testGetEntityBundleAll() throws RestServiceException {
// Make sure we can get all parts of the bundle
int mask = ENTITY | ANNOTATIONS | PERMISSIONS | ENTITY_PATH
| HAS_CHILDREN | ACCESS_REQUIREMENTS
| UNMET_ACCESS_REQUIREMENTS;
EntityBundle bundle = synapseClient.getEntityBundle(entityId, mask);
assertNotNull(bundle);
// All of the requested parts should be present
assertNotNull(bundle.getEntity());
assertNotNull(bundle.getAnnotations());
assertNotNull(bundle.getPath());
assertNotNull(bundle.getPermissions());
assertNotNull(bundle.getHasChildren());
assertNotNull(bundle.getAccessRequirements());
assertNotNull(bundle.getUnmetAccessRequirements());
}
@Test
public void testGetEntityBundleNone() throws RestServiceException {
// Make sure all are null
int mask = 0x0;
EntityBundle bundle = synapseClient.getEntityBundle(entityId, mask);
assertNotNull(bundle);
// All of the parts should be null
assertNull(bundle.getEntity());
assertNull(bundle.getAnnotations());
assertNull(bundle.getPath());
assertNull(bundle.getPermissions());
assertNull(bundle.getHasChildren());
assertNull(bundle.getAccessRequirements());
assertNull(bundle.getUnmetAccessRequirements());
}
@Test(expected = IllegalArgumentException.class)
public void testParseEntityFromJsonNoType()
throws JSONObjectAdapterException {
ExampleEntity example = new ExampleEntity();
example.setName("some name");
example.setDescription("some description");
// do not set the type
String json = EntityFactory.createJSONStringForEntity(example);
// This will fail as the type is required
synapseClient.parseEntityFromJson(json);
}
@Test
public void testParseEntityFromJson() throws JSONObjectAdapterException {
ExampleEntity example = new ExampleEntity();
example.setName("some name");
example.setDescription("some description");
example.setEntityType(ExampleEntity.class.getName());
String json = EntityFactory.createJSONStringForEntity(example);
// Now make sure this can be read back
ExampleEntity clone = (ExampleEntity) synapseClient
.parseEntityFromJson(json);
assertEquals(example, clone);
}
@Test
public void testCreateOrUpdateEntityFalse()
throws JSONObjectAdapterException, RestServiceException,
SynapseException {
ExampleEntity in = new ExampleEntity();
in.setName("some name");
in.setDescription("some description");
in.setEntityType(ExampleEntity.class.getName());
ExampleEntity out = new ExampleEntity();
out.setName("some name");
out.setDescription("some description");
out.setEntityType(ExampleEntity.class.getName());
out.setId("syn123");
out.setEtag("45");
// when "in" is passed in, return "out".
when(mockSynapse.putEntity(in)).thenReturn(out);
String result = synapseClient.createOrUpdateEntity(in, null, false);
assertEquals(out.getId(), result);
verify(mockSynapse).putEntity(in);
}
@Test
public void testCreateOrUpdateEntityTrue()
throws JSONObjectAdapterException, RestServiceException,
SynapseException {
ExampleEntity in = new ExampleEntity();
in.setName("some name");
in.setDescription("some description");
in.setEntityType(ExampleEntity.class.getName());
ExampleEntity out = new ExampleEntity();
out.setName("some name");
out.setDescription("some description");
out.setEntityType(ExampleEntity.class.getName());
out.setId("syn123");
out.setEtag("45");
// when "in" is passed in, return "out".
when(mockSynapse.createEntity(in)).thenReturn(out);
String result = synapseClient.createOrUpdateEntity(in, null, true);
assertEquals(out.getId(), result);
verify(mockSynapse).createEntity(in);
}
@Test
public void testCreateOrUpdateEntityTrueWithAnnos()
throws JSONObjectAdapterException, RestServiceException,
SynapseException {
ExampleEntity in = new ExampleEntity();
in.setName("some name");
in.setDescription("some description");
in.setEntityType(ExampleEntity.class.getName());
Annotations annos = new Annotations();
annos.addAnnotation("someString", "one");
ExampleEntity out = new ExampleEntity();
out.setName("some name");
out.setDescription("some description");
out.setEntityType(ExampleEntity.class.getName());
out.setId("syn123");
out.setEtag("45");
// when "in" is passed in, return "out".
when(mockSynapse.createEntity(in)).thenReturn(out);
String result = synapseClient.createOrUpdateEntity(in, annos, true);
assertEquals(out.getId(), result);
verify(mockSynapse).createEntity(in);
annos.setEtag(out.getEtag());
annos.setId(out.getId());
verify(mockSynapse).updateAnnotations(out.getId(), annos);
}
@Test
public void testGetEntityBenefactorAcl() throws Exception {
EntityBundle bundle = new EntityBundle();
bundle.setBenefactorAcl(acl);
when(mockSynapse.getEntityBundle("syn101", EntityBundle.BENEFACTOR_ACL))
.thenReturn(bundle);
AccessControlList clone = synapseClient
.getEntityBenefactorAcl("syn101");
assertEquals(acl, clone);
}
@Test
public void testCreateAcl() throws Exception {
AccessControlList clone = synapseClient.createAcl(acl);
assertEquals(acl, clone);
}
@Test
public void testUpdateAcl() throws Exception {
AccessControlList clone = synapseClient.updateAcl(acl);
assertEquals(acl, clone);
}
@Test
public void testUpdateAclRecursive() throws Exception {
AccessControlList clone = synapseClient.updateAcl(acl, true);
assertEquals(acl, clone);
verify(mockSynapse).updateACL(any(AccessControlList.class), eq(true));
}
@Test
public void testDeleteAcl() throws Exception {
EntityBundle bundle = new EntityBundle();
bundle.setBenefactorAcl(acl);
when(mockSynapse.getEntityBundle("syn101", EntityBundle.BENEFACTOR_ACL))
.thenReturn(bundle);
AccessControlList clone = synapseClient.deleteAcl("syn101");
assertEquals(acl, clone);
}
@Test
public void testHasAccess() throws Exception {
assertTrue(synapseClient.hasAccess("syn101", "READ"));
}
@Test
public void testGetUserProfile() throws Exception {
// verify the call goes directly through the synapse client provider
String testRepoUrl = "http://mytestrepourl";
when(mockUrlProvider.getRepositoryServiceUrl()).thenReturn(testRepoUrl);
UserProfile userProfile = synapseClient.getUserProfile(testUserId);
assertEquals(userProfile, testUserProfile);
}
@Test
public void testGetProjectById() throws Exception {
String projectId = "syn1029";
Project project = new Project();
project.setId(projectId);
when(mockSynapse.getEntityById(projectId)).thenReturn(project);
Project actualProject = synapseClient.getProject(projectId);
assertEquals(project, actualProject);
}
@Test
public void testGetJSONEntity() throws Exception {
JSONObject json = EntityFactory.createJSONObjectForEntity(entity);
Mockito.when(mockSynapse.getEntity(anyString())).thenReturn(json);
String testRepoUri = "/testservice";
synapseClient.getJSONEntity(testRepoUri);
// verify that this call uses Synapse.getEntity(testRepoUri)
verify(mockSynapse).getEntity(testRepoUri);
}
@Test
public void testGetWikiHeaderTree() throws Exception {
PaginatedResults<WikiHeader> headerTreeResults = new PaginatedResults<WikiHeader>();
when(mockSynapse.getWikiHeaderTree(anyString(), any(ObjectType.class)))
.thenReturn(headerTreeResults);
synapseClient.getWikiHeaderTree("testId", ObjectType.ENTITY.toString());
verify(mockSynapse).getWikiHeaderTree(anyString(),
any(ObjectType.class));
}
@Test
public void testGetWikiAttachmentHandles() throws Exception {
FileHandleResults testResults = new FileHandleResults();
Mockito.when(
mockSynapse
.getWikiAttachmenthHandles(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class)))
.thenReturn(testResults);
synapseClient.getWikiAttachmentHandles(new WikiPageKey("syn123",
ObjectType.ENTITY.toString(), "20"));
verify(mockSynapse).getWikiAttachmenthHandles(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
}
@Test
public void testDeleteV2WikiPage() throws Exception {
synapseClient.deleteV2WikiPage(new WikiPageKey("syn123",
ObjectType.ENTITY.toString(), "20"));
verify(mockSynapse).deleteV2WikiPage(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
}
@Test
public void testGetV2WikiPage() throws Exception {
Mockito.when(
mockSynapse
.getV2WikiPage(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class)))
.thenReturn(v2Page);
synapseClient.getV2WikiPage(new WikiPageKey("syn123", ObjectType.ENTITY
.toString(), "20"));
verify(mockSynapse).getV2WikiPage(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
Mockito.when(
mockSynapse
.getVersionOfV2WikiPage(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
any(Long.class))).thenReturn(v2Page);
synapseClient.getVersionOfV2WikiPage(new WikiPageKey("syn123",
ObjectType.ENTITY.toString(), "20"), new Long(0));
verify(mockSynapse).getVersionOfV2WikiPage(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
any(Long.class));
}
@Test
public void testUpdateV2WikiPage() throws Exception {
Mockito.when(
mockSynapse.updateV2WikiPage(anyString(),
any(ObjectType.class), any(V2WikiPage.class)))
.thenReturn(v2Page);
synapseClient.updateV2WikiPage("testId", ObjectType.ENTITY.toString(),
v2Page);
verify(mockSynapse).updateV2WikiPage(anyString(),
any(ObjectType.class), any(V2WikiPage.class));
}
@Test
public void testRestoreV2WikiPage() throws Exception {
String wikiId = "syn123";
Mockito.when(
mockSynapse.restoreV2WikiPage(anyString(),
any(ObjectType.class), any(String.class), anyLong()))
.thenReturn(v2Page);
synapseClient.restoreV2WikiPage("ownerId",
ObjectType.ENTITY.toString(), wikiId, new Long(2));
verify(mockSynapse).restoreV2WikiPage(anyString(),
any(ObjectType.class), any(String.class), anyLong());
}
@Test
public void testGetV2WikiHeaderTree() throws Exception {
PaginatedResults<V2WikiHeader> headerTreeResults = new PaginatedResults<V2WikiHeader>();
when(
mockSynapse.getV2WikiHeaderTree(anyString(),
any(ObjectType.class))).thenReturn(headerTreeResults);
synapseClient.getV2WikiHeaderTree("testId",
ObjectType.ENTITY.toString());
verify(mockSynapse).getV2WikiHeaderTree(anyString(),
any(ObjectType.class));
}
@Test
public void testGetV2WikiOrderHint() throws Exception {
V2WikiOrderHint orderHint = new V2WikiOrderHint();
when(
mockSynapse
.getV2OrderHint(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class)))
.thenReturn(orderHint);
synapseClient.getV2WikiOrderHint(new WikiPageKey("syn123",
ObjectType.ENTITY.toString(), "20"));
verify(mockSynapse).getV2OrderHint(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
}
@Test
public void testUpdateV2WikiOrderHint() throws Exception {
V2WikiOrderHint orderHint = new V2WikiOrderHint();
when(mockSynapse.updateV2WikiOrderHint(any(V2WikiOrderHint.class)))
.thenReturn(orderHint);
synapseClient.updateV2WikiOrderHint(orderHint);
verify(mockSynapse).updateV2WikiOrderHint(any(V2WikiOrderHint.class));
}
@Test
public void testGetV2WikiHistory() throws Exception {
PaginatedResults<V2WikiHistorySnapshot> historyResults = new PaginatedResults<V2WikiHistorySnapshot>();
when(
mockSynapse
.getV2WikiHistory(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
any(Long.class), any(Long.class))).thenReturn(
historyResults);
synapseClient.getV2WikiHistory(new WikiPageKey("syn123",
ObjectType.ENTITY.toString(), "20"), new Long(10), new Long(0));
verify(mockSynapse).getV2WikiHistory(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
any(Long.class), any(Long.class));
}
@Test
public void testGetV2WikiAttachmentHandles() throws Exception {
FileHandleResults testResults = new FileHandleResults();
Mockito.when(
mockSynapse
.getV2WikiAttachmentHandles(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class)))
.thenReturn(testResults);
synapseClient.getV2WikiAttachmentHandles(new WikiPageKey("syn123",
ObjectType.ENTITY.toString(), "20"));
verify(mockSynapse).getV2WikiAttachmentHandles(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
Mockito.when(
mockSynapse
.getVersionOfV2WikiAttachmentHandles(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
any(Long.class))).thenReturn(testResults);
synapseClient.getVersionOfV2WikiAttachmentHandles(new WikiPageKey(
"syn123", ObjectType.ENTITY.toString(), "20"), new Long(0));
verify(mockSynapse).getVersionOfV2WikiAttachmentHandles(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
any(Long.class));
}
@Test
public void testZipAndUpload() throws IOException, RestServiceException,
JSONObjectAdapterException, SynapseException {
Mockito.when(
mockSynapse
.createFileHandle(any(File.class), any(String.class)))
.thenReturn(handle);
synapseClient.zipAndUploadFile("markdown", "fileName");
verify(mockSynapse)
.createFileHandle(any(File.class), any(String.class));
}
@Test
public void testGetMarkdown() throws IOException, RestServiceException,
SynapseException {
String someMarkDown = "someMarkDown";
Mockito.when(
mockSynapse
.downloadV2WikiMarkdown(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class)))
.thenReturn(someMarkDown);
synapseClient.getMarkdown(new WikiPageKey("syn123", ObjectType.ENTITY
.toString(), "20"));
verify(mockSynapse).downloadV2WikiMarkdown(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
Mockito.when(
mockSynapse
.downloadVersionOfV2WikiMarkdown(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
any(Long.class))).thenReturn(someMarkDown);
synapseClient.getVersionOfMarkdown(new WikiPageKey("syn123",
ObjectType.ENTITY.toString(), "20"), new Long(0));
verify(mockSynapse).downloadVersionOfV2WikiMarkdown(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
any(Long.class));
}
@Test
public void testCreateV2WikiPageWithV1() throws Exception {
Mockito.when(
mockSynapse.createWikiPage(anyString(), any(ObjectType.class),
any(WikiPage.class))).thenReturn(page);
synapseClient.createV2WikiPageWithV1("testId",
ObjectType.ENTITY.toString(), page);
verify(mockSynapse).createWikiPage(anyString(), any(ObjectType.class),
any(WikiPage.class));
}
@Test
public void testUpdateV2WikiPageWithV1() throws Exception {
Mockito.when(
mockSynapse.updateWikiPage(anyString(), any(ObjectType.class),
any(WikiPage.class))).thenReturn(page);
synapseClient.updateV2WikiPageWithV1("testId",
ObjectType.ENTITY.toString(), page);
verify(mockSynapse).updateWikiPage(anyString(), any(ObjectType.class),
any(WikiPage.class));
}
@Test
public void getV2WikiPageAsV1() throws Exception {
Mockito.when(
mockSynapse
.getWikiPage(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class)))
.thenReturn(page);
Mockito.when(
mockSynapse
.getV2WikiPage(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class)))
.thenReturn(v2Page);
synapseClient.getV2WikiPageAsV1(new WikiPageKey("syn123",
ObjectType.ENTITY.toString(), "20"));
verify(mockSynapse).getWikiPage(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
// asking for the same page twice should result in a cache hit, and it
// should not ask for it from the synapse client
synapseClient.getV2WikiPageAsV1(new WikiPageKey("syn123",
ObjectType.ENTITY.toString(), "20"));
verify(mockSynapse, Mockito.times(1)).getWikiPage(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class));
Mockito.when(
mockSynapse
.getWikiPageForVersion(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
any(Long.class))).thenReturn(page);
Mockito.when(
mockSynapse
.getVersionOfV2WikiPage(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
anyLong())).thenReturn(v2Page);
synapseClient.getVersionOfV2WikiPageAsV1(new WikiPageKey("syn123",
ObjectType.ENTITY.toString(), "20"), new Long(0));
verify(mockSynapse).getWikiPageForVersion(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
any(Long.class));
// asking for the same page twice should result in a cache hit, and it
// should not ask for it from the synapse client
synapseClient.getVersionOfV2WikiPageAsV1(new WikiPageKey("syn123",
ObjectType.ENTITY.toString(), "20"), new Long(0));
verify(mockSynapse, Mockito.times(1)).getWikiPageForVersion(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
any(Long.class));
}
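/**
 * Resets the mock Synapse client and re-stubs the calls needed by
 * testUpdateExternalFileHandle(), so each scenario in that test starts
 * from a clean set of interactions.
 */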
private void resetUpdateExternalFileHandleMocks(String testId,
FileEntity file, ExternalFileHandle handle)
throws SynapseException, JSONObjectAdapterException {
reset(mockSynapse);
when(mockSynapse.getEntityById(testId)).thenReturn(file);
when(
mockSynapse
.createExternalFileHandle(any(ExternalFileHandle.class)))
.thenReturn(handle);
when(mockSynapse.putEntity(any(FileEntity.class))).thenReturn(file);
}
@Test
public void testUpdateExternalFileHandle() throws Exception {
// verify the call goes directly through the synapse client provider, and
// that it tries to rename the entity to the file name
String myFileName = "testFileName.csv";
String testUrl = "http://mytesturl/" + myFileName;
String testId = "myTestId";
FileEntity file = new FileEntity();
String originalFileEntityName = "syn1223";
file.setName(originalFileEntityName);
file.setId(testId);
file.setDataFileHandleId("handle1");
ExternalFileHandle handle = new ExternalFileHandle();
handle.setExternalURL(testUrl);
resetUpdateExternalFileHandleMocks(testId, file, handle);
ArgumentCaptor<FileEntity> arg = ArgumentCaptor
.forClass(FileEntity.class);
synapseClient.updateExternalFile(testId, testUrl, null, storageLocationId);
verify(mockSynapse).getEntityById(testId);
verify(mockSynapse).createExternalFileHandle(
any(ExternalFileHandle.class));
verify(mockSynapse, Mockito.times(2)).putEntity(arg.capture());
// verify rename
FileEntity fileEntityArg = arg.getValue(); // last value captured
assertEquals(myFileName, fileEntityArg.getName());
// and if rename fails, verify all is well (but the FileEntity name is
// not updated)
resetUpdateExternalFileHandleMocks(testId, file, handle);
file.setName(originalFileEntityName);
// first call should return file, second call to putEntity should throw
// an exception
when(mockSynapse.putEntity(any(FileEntity.class))).thenReturn(file)
.thenThrow(
new IllegalArgumentException(
"invalid name for some reason"));
synapseClient.updateExternalFile(testId, testUrl, "", storageLocationId);
// called createExternalFileHandle
verify(mockSynapse).createExternalFileHandle(
any(ExternalFileHandle.class));
// and it should have called putEntity 2 additional times
verify(mockSynapse, Mockito.times(2)).putEntity(arg.capture());
fileEntityArg = arg.getValue(); // last value captured
assertEquals(originalFileEntityName, fileEntityArg.getName());
// and (finally) verify the correct name if it is explicitly set
resetUpdateExternalFileHandleMocks(testId, file, handle);
String newName = "a new name";
synapseClient.updateExternalFile(testId, testUrl, newName, storageLocationId);
file.setName(newName);
verify(mockSynapse).putEntity(eq(file)); // should equal the previous file but with the new name
}
@Test
public void testCreateExternalFile() throws Exception {
// test setting file handle name
String parentEntityId = "syn123333";
String externalUrl = "sftp://foobar.edu/b/test.txt";
String fileName = "testing.txt";
when(
mockSynapse
.createExternalFileHandle(any(ExternalFileHandle.class)))
.thenReturn(new ExternalFileHandle());
when(mockSynapse.createEntity(any(FileEntity.class))).thenReturn(
new FileEntity());
synapseClient.createExternalFile(parentEntityId, externalUrl, fileName, storageLocationId);
ArgumentCaptor<ExternalFileHandle> captor = ArgumentCaptor
.forClass(ExternalFileHandle.class);
verify(mockSynapse).createExternalFileHandle(captor.capture());
ExternalFileHandle handle = captor.getValue();
// verify name is set
assertEquals(fileName, handle.getFileName());
assertEquals(externalUrl, handle.getExternalURL());
assertEquals(storageLocationId, handle.getStorageLocationId());
}
@Test
public void testGetEntityDoi() throws Exception {
// wiring test
Doi testDoi = new Doi();
testDoi.setDoiStatus(DoiStatus.CREATED);
testDoi.setId("test doi id");
testDoi.setCreatedBy("Test User");
testDoi.setCreatedOn(new Date());
testDoi.setObjectId("syn1234");
Mockito.when(mockSynapse.getEntityDoi(anyString(), anyLong()))
.thenReturn(testDoi);
synapseClient.getEntityDoi("test entity id", null);
verify(mockSynapse).getEntityDoi(anyString(), anyLong());
}
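/** Builds the FileEntity (id "5544") shared by the upload-related tests. */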
private FileEntity getTestFileEntity() {
FileEntity testFileEntity = new FileEntity();
testFileEntity.setId("5544");
testFileEntity.setName(testFileName);
return testFileEntity;
}
@Test(expected = NotFoundException.class)
public void testGetEntityDoiNotFound() throws Exception {
// wiring test
Mockito.when(mockSynapse.getEntityDoi(anyString(), anyLong()))
.thenThrow(new SynapseNotFoundException());
synapseClient.getEntityDoi("test entity id", null);
}
@Test
public void testCreateDoi() throws Exception {
// wiring test
synapseClient.createDoi("test entity id", null);
verify(mockSynapse).createEntityDoi(anyString(), anyLong());
}
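/**
 * Builds a single-element ChunkRequest list (chunk number 1 with a dummy
 * token) used by the chunked-upload tests.
 */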
private List<ChunkRequest> getTestChunkRequestJson()
throws JSONObjectAdapterException {
ChunkRequest chunkRequest = new ChunkRequest();
ChunkedFileToken token = new ChunkedFileToken();
token.setKey("test key");
chunkRequest.setChunkedFileToken(token);
chunkRequest.setChunkNumber(1L);
List<ChunkRequest> chunkRequests = new ArrayList<ChunkRequest>();
chunkRequests.add(chunkRequest);
return chunkRequests;
}
@Test
public void testCombineChunkedFileUpload()
throws JSONObjectAdapterException, SynapseException,
RestServiceException {
List<ChunkRequest> chunkRequests = getTestChunkRequestJson();
synapseClient.combineChunkedFileUpload(chunkRequests);
verify(mockSynapse).startUploadDeamon(
any(CompleteAllChunksRequest.class));
}
@Test
public void testGetUploadDaemonStatus() throws JSONObjectAdapterException,
SynapseException, RestServiceException {
synapseClient.getUploadDaemonStatus("daemonId");
verify(mockSynapse).getCompleteUploadDaemonStatus(anyString());
}
/**
* Direct upload tests. Most of the methods are simple pass-throughs to the
* Java Synapse client, but completeUpload has additional logic
*
* @throws JSONObjectAdapterException
* @throws SynapseException
* @throws RestServiceException
*/
@Test
public void testCompleteUpload() throws JSONObjectAdapterException,
SynapseException, RestServiceException {
FileEntity testFileEntity = getTestFileEntity();
when(mockSynapse.createEntity(any(FileEntity.class))).thenReturn(
testFileEntity);
when(mockSynapse.putEntity(any(FileEntity.class))).thenReturn(
testFileEntity);
// parent entity has no immediate children
EntityIdList childEntities = new EntityIdList();
childEntities.setIdList(new ArrayList());
when(
mockSynapse.getDescendants(anyString(), anyInt(), anyInt(),
anyString())).thenReturn(childEntities);
synapseClient.setFileEntityFileHandle(null, null, "parentEntityId");
// it should have tried to create a new entity (since entity id was
// null)
verify(mockSynapse).createEntity(any(FileEntity.class));
}
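// The three tests below exercise getFileEntityIdWithSameName(), which runs a
// Synapse query by name under the parent: no results should yield a
// NotFoundException, a non-FileEntity match should yield a ConflictException,
// and a FileEntity match should return its id.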
@Test(expected = NotFoundException.class)
public void testGetFileEntityIdWithSameNameNotFound()
throws JSONObjectAdapterException, SynapseException,
RestServiceException, JSONException {
JSONObject queryResult = new JSONObject();
queryResult.put("totalNumberOfResults", (long) 0);
when(mockSynapse.query(anyString())).thenReturn(queryResult); // TODO
String fileEntityId = synapseClient.getFileEntityIdWithSameName(
testFileName, "parentEntityId");
}
@Test(expected = ConflictException.class)
public void testGetFileEntityIdWithSameNameConflict()
throws JSONObjectAdapterException, SynapseException,
RestServiceException, JSONException {
Folder folder = new Folder();
folder.setName(testFileName);
JSONObject queryResult = new JSONObject();
JSONArray results = new JSONArray();
// Set up results.
JSONObject objectResult = EntityFactory
.createJSONObjectForEntity(folder);
JSONArray typeArray = new JSONArray();
typeArray.put("Folder");
objectResult.put("entity.concreteType", typeArray);
results.put(objectResult);
// Set up query result.
queryResult.put("totalNumberOfResults", (long) 1);
queryResult.put("results", results);
// Have results returned in query.
when(mockSynapse.query(anyString())).thenReturn(queryResult);
String fileEntityId = synapseClient.getFileEntityIdWithSameName(
testFileName, "parentEntityId");
}
@Test
public void testGetFileEntityIdWithSameNameFound() throws JSONException,
JSONObjectAdapterException, SynapseException, RestServiceException {
FileEntity file = getTestFileEntity();
JSONObject queryResult = new JSONObject();
JSONArray results = new JSONArray();
// Set up results.
JSONObject objectResult = EntityFactory.createJSONObjectForEntity(file);
JSONArray typeArray = new JSONArray();
typeArray.put(FileEntity.class.getName());
objectResult.put("entity.concreteType", typeArray);
objectResult.put("entity.id", file.getId());
results.put(objectResult);
queryResult.put("totalNumberOfResults", (long) 1);
queryResult.put("results", results);
// Have results returned in query.
when(mockSynapse.query(anyString())).thenReturn(queryResult);
String fileEntityId = synapseClient.getFileEntityIdWithSameName(
testFileName, "parentEntityId");
assertEquals(fileEntityId, file.getId());
}
@Test
public void testCompleteChunkedFileUploadExistingEntity()
throws JSONObjectAdapterException, SynapseException,
RestServiceException {
List<ChunkRequest> chunkRequests = getTestChunkRequestJson();
FileEntity testFileEntity = getTestFileEntity();
when(mockSynapse.getEntityById(anyString())).thenReturn(testFileEntity);
when(mockSynapse.createEntity(any(FileEntity.class))).thenThrow(
new AssertionError("No need to create a new entity!"));
when(mockSynapse.putEntity(any(FileEntity.class))).thenReturn(
testFileEntity);
synapseClient.setFileEntityFileHandle(null, entityId, "parentEntityId");
// it should have tried to find the entity
verify(mockSynapse).getEntityById(anyString());
// update the data file handle id
verify(mockSynapse, Mockito.times(1)).putEntity(any(FileEntity.class));
}
@Test
public void testGetChunkedFileToken() throws SynapseException,
RestServiceException, JSONObjectAdapterException {
String fileName = "test file.zip";
String contentType = "application/test";
String md5 = "0123456789abcdef";
ChunkedFileToken testToken = new ChunkedFileToken();
testToken.setFileName(fileName);
testToken.setKey("a key 42");
testToken.setUploadId("upload ID 123");
testToken.setContentMD5(md5);
testToken.setStorageLocationId(storageLocationId);
when(
mockSynapse
.createChunkedFileUploadToken(any(CreateChunkedFileTokenRequest.class)))
.thenReturn(testToken);
ChunkedFileToken token = synapseClient.getChunkedFileToken(fileName,
contentType, md5, storageLocationId);
verify(mockSynapse).createChunkedFileUploadToken(
any(CreateChunkedFileTokenRequest.class));
assertEquals(testToken, token);
}
@Test
public void testGetChunkedPresignedUrl() throws SynapseException,
RestServiceException, MalformedURLException,
JSONObjectAdapterException {
URL testUrl = new URL("http://test.presignedurl.com/foo");
when(mockSynapse.createChunkedPresignedUrl(any(ChunkRequest.class)))
.thenReturn(testUrl);
String presignedUrl = synapseClient
.getChunkedPresignedUrl(getTestChunkRequestJson().get(0));
verify(mockSynapse).createChunkedPresignedUrl(any(ChunkRequest.class));
assertEquals(testUrl.toString(), presignedUrl);
}
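// The membership tests below verify the branching in inviteMember()/requestMembership():
// an open invitation or request short-circuits, canJoin leads to a direct
// addTeamMember() call, and otherwise an invitation or request is created with
// the expected signed-token endpoint URLs.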
@Test
public void testInviteMemberOpenInvitations() throws SynapseException,
RestServiceException, JSONObjectAdapterException {
membershipStatus.setHasOpenInvitation(true);
// verify it does not create a new invitation since one is already open
synapseClient.inviteMember("123", "a team", "", "");
verify(mockSynapse, Mockito.times(0)).addTeamMember(anyString(),
anyString(), anyString(), anyString());
verify(mockSynapse, Mockito.times(0)).createMembershipInvitation(
any(MembershipInvtnSubmission.class), anyString(), anyString());
}
@Test
public void testRequestMemberOpenRequests() throws SynapseException,
RestServiceException, JSONObjectAdapterException {
membershipStatus.setHasOpenRequest(true);
// verify it does not create a new request since one is already open
synapseClient.requestMembership("123", "a team", "", TEST_HOME_PAGE_BASE);
verify(mockSynapse, Mockito.times(0)).addTeamMember(anyString(),
anyString(), eq(TEST_HOME_PAGE_BASE+"#!Team:"), eq(TEST_HOME_PAGE_BASE+"#!SignedToken:Settings/"));
verify(mockSynapse, Mockito.times(0)).createMembershipRequest(
any(MembershipRqstSubmission.class), anyString(), anyString());
}
@Test
public void testInviteMemberCanJoin() throws SynapseException,
RestServiceException, JSONObjectAdapterException {
membershipStatus.setCanJoin(true);
synapseClient.inviteMember("123", "a team", "", TEST_HOME_PAGE_BASE);
verify(mockSynapse).addTeamMember(anyString(), anyString(), eq(TEST_HOME_PAGE_BASE+"#!Team:"), eq(TEST_HOME_PAGE_BASE+"#!SignedToken:Settings/"));
}
@Test
public void testRequestMembershipCanJoin() throws SynapseException,
RestServiceException, JSONObjectAdapterException {
membershipStatus.setCanJoin(true);
synapseClient.requestMembership("123", "a team", "", TEST_HOME_PAGE_BASE);
verify(mockSynapse).addTeamMember(anyString(), anyString(), eq(TEST_HOME_PAGE_BASE+"#!Team:"), eq(TEST_HOME_PAGE_BASE+"#!SignedToken:Settings/"));
}
@Test
public void testInviteMember() throws SynapseException,
RestServiceException, JSONObjectAdapterException {
synapseClient.inviteMember("123", "a team", "", TEST_HOME_PAGE_BASE);
verify(mockSynapse).createMembershipInvitation(
any(MembershipInvtnSubmission.class), eq(TEST_HOME_PAGE_BASE+"#!SignedToken:JoinTeam/"), eq(TEST_HOME_PAGE_BASE+"#!SignedToken:Settings/"));
}
@Test
public void testRequestMembership() throws SynapseException,
RestServiceException, JSONObjectAdapterException {
synapseClient.requestMembership("123", "a team", "", TEST_HOME_PAGE_BASE);
verify(mockSynapse).createMembershipRequest(
any(MembershipRqstSubmission.class), eq(TEST_HOME_PAGE_BASE+"#!SignedToken:JoinTeam/"), eq(TEST_HOME_PAGE_BASE+"#!SignedToken:Settings/"));
}
@Test
public void testGetOpenRequestCountUnauthorized() throws SynapseException,
RestServiceException {
// is not an admin
TeamMember testTeamMember = new TeamMember();
testTeamMember.setIsAdmin(false);
when(mockSynapse.getTeamMember(anyString(), anyString())).thenReturn(
testTeamMember);
Long count = synapseClient.getOpenRequestCount("myUserId", "myTeamId");
// should never ask for open request count
verify(mockSynapse, Mockito.never()).getOpenMembershipRequests(
anyString(), anyString(), anyLong(), anyLong());
assertNull(count);
}
@Test
public void testGetOpenRequestCount() throws SynapseException,
RestServiceException, MalformedURLException,
JSONObjectAdapterException {
// is admin
TeamMember testTeamMember = new TeamMember();
testTeamMember.setIsAdmin(true);
when(mockSynapse.getTeamMember(anyString(), anyString())).thenReturn(
testTeamMember);
Long testCount = 42L;
PaginatedResults<MembershipRequest> testOpenRequests = new PaginatedResults<MembershipRequest>();
testOpenRequests.setTotalNumberOfResults(testCount);
when(
mockSynapse.getOpenMembershipRequests(anyString(), anyString(),
anyLong(), anyLong())).thenReturn(testOpenRequests);
Long count = synapseClient.getOpenRequestCount("myUserId", "myTeamId");
verify(mockSynapse, Mockito.times(1)).getOpenMembershipRequests(
anyString(), anyString(), anyLong(), anyLong());
assertEquals(testCount, count);
}
@Test
public void testGetOpenTeamInvitations() throws SynapseException,
RestServiceException, JSONObjectAdapterException {
setupTeamInvitations();
int limit = 55;
int offset = 2;
String teamId = "132";
List<OpenTeamInvitationBundle> invitationBundles = synapseClient
.getOpenTeamInvitations(teamId, limit, offset);
verify(mockSynapse).getOpenMembershipInvitationSubmissions(eq(teamId),
anyString(), eq((long) limit), eq((long) offset));
// we set this up so that a single invite would be returned. Verify that
// it is the one we're looking for
assertEquals(1, invitationBundles.size());
OpenTeamInvitationBundle invitationBundle = invitationBundles.get(0);
assertEquals(inviteeUserProfile, invitationBundle.getUserProfile());
assertEquals(testInvitation, invitationBundle.getMembershipInvtnSubmission());
}
@Test
public void testGetTeamBundle() throws SynapseException,
RestServiceException, MalformedURLException,
JSONObjectAdapterException {
// set team member count
Long testMemberCount = 111L;
PaginatedResults<TeamMember> allMembers = new PaginatedResults<TeamMember>();
allMembers.setTotalNumberOfResults(testMemberCount);
when(
mockSynapse.getTeamMembers(anyString(), anyString(), anyLong(),
anyLong())).thenReturn(allMembers);
// set team
Team team = new Team();
team.setId("test team id");
when(mockSynapse.getTeam(anyString())).thenReturn(team);
// is member
TeamMembershipStatus membershipStatus = new TeamMembershipStatus();
membershipStatus.setIsMember(true);
when(mockSynapse.getTeamMembershipStatus(anyString(), anyString()))
.thenReturn(membershipStatus);
// is admin
TeamMember testTeamMember = new TeamMember();
boolean isAdmin = true;
testTeamMember.setIsAdmin(isAdmin);
when(mockSynapse.getTeamMember(anyString(), anyString())).thenReturn(
testTeamMember);
// make the call
TeamBundle bundle = synapseClient.getTeamBundle("myUserId", "myTeamId",
true);
// now verify that all values were returned in the bundle (based on the
// mocked service calls)
assertEquals(team, bundle.getTeam());
assertEquals(membershipStatus, bundle.getTeamMembershipStatus());
assertEquals(isAdmin, bundle.isUserAdmin());
assertEquals(testMemberCount, bundle.getTotalMemberCount());
}
@Test
public void testGetTeamMembers() throws SynapseException,
RestServiceException, MalformedURLException,
JSONObjectAdapterException {
// set team member count
Long testMemberCount = 111L;
PaginatedResults<TeamMember> allMembers = new PaginatedResults<TeamMember>();
allMembers.setTotalNumberOfResults(testMemberCount);
List<TeamMember> members = new ArrayList<TeamMember>();
TeamMember member1 = new TeamMember();
member1.setIsAdmin(true);
UserGroupHeader header1 = new UserGroupHeader();
Long member1Id = 123L;
header1.setOwnerId(member1Id + "");
member1.setMember(header1);
members.add(member1);
TeamMember member2 = new TeamMember();
member2.setIsAdmin(false);
UserGroupHeader header2 = new UserGroupHeader();
Long member2Id = 456L;
header2.setOwnerId(member2Id + "");
member2.setMember(header2);
members.add(member2);
allMembers.setResults(members);
when(
mockSynapse.getTeamMembers(anyString(), anyString(), anyLong(),
anyLong())).thenReturn(allMembers);
List<UserProfile> profiles = new ArrayList<UserProfile>();
UserProfile profile1 = new UserProfile();
profile1.setOwnerId(member1Id + "");
UserProfile profile2 = new UserProfile();
profile2.setOwnerId(member2Id + "");
profiles.add(profile1);
profiles.add(profile2);
when(mockSynapse.listUserProfiles(anyList())).thenReturn(profiles);
// make the call
TeamMemberPagedResults results = synapseClient.getTeamMembers(
"myTeamId", "search term", 100, 0);
// verify it results in the two team member bundles that we expect
List<TeamMemberBundle> memberBundles = results.getResults();
assertEquals(2, memberBundles.size());
TeamMemberBundle bundle1 = memberBundles.get(0);
assertTrue(bundle1.getIsTeamAdmin());
assertEquals(profile1, bundle1.getUserProfile());
TeamMemberBundle bundle2 = memberBundles.get(1);
assertFalse(bundle2.getIsTeamAdmin());
assertEquals(profile2, bundle2.getUserProfile());
}
@Test
public void testGetEntityHeaderBatch() throws SynapseException,
RestServiceException, MalformedURLException,
JSONObjectAdapterException {
List<EntityHeader> headers = synapseClient
.getEntityHeaderBatch(new ArrayList());
// in the setup, we told the mockSynapse.getEntityHeaderBatch to return
// batchHeaderResults
for (int i = 0; i < batchHeaderResults.size(); i++) {
assertEquals(batchHeaderResults.get(i), headers.get(i));
}
}
@Test
public void testSendMessage() throws SynapseException,
RestServiceException, JSONObjectAdapterException {
ArgumentCaptor<MessageToUser> arg = ArgumentCaptor
.forClass(MessageToUser.class);
Set<String> recipients = new HashSet<String>();
recipients.add("333");
String subject = "The Mathematics of Quantum Neutrino Fields";
String messageBody = "Atoms are not to be trusted, they make up everything";
String hostPageBaseURL = "http://localhost/Portal.html";
synapseClient.sendMessage(recipients, subject, messageBody, hostPageBaseURL);
verify(mockSynapse).uploadToFileHandle(any(byte[].class), eq(SynapseClientImpl.HTML_MESSAGE_CONTENT_TYPE));
verify(mockSynapse).sendMessage(arg.capture());
MessageToUser toSendMessage = arg.getValue();
assertEquals(subject, toSendMessage.getSubject());
assertEquals(recipients, toSendMessage.getRecipients());
assertTrue(toSendMessage.getNotificationUnsubscribeEndpoint().startsWith(hostPageBaseURL));
}
@Test
public void testGetCertifiedUserPassingRecord()
throws RestServiceException, SynapseException,
JSONObjectAdapterException {
PassingRecord passingRecord = new PassingRecord();
passingRecord.setPassed(true);
passingRecord.setQuizId(1238L);
String passingRecordJson = passingRecord.writeToJSONObject(
adapterFactory.createNew()).toJSONString();
when(mockSynapse.getCertifiedUserPassingRecord(anyString()))
.thenReturn(passingRecord);
String returnedPassingRecordJson = synapseClient
.getCertifiedUserPassingRecord("123");
verify(mockSynapse).getCertifiedUserPassingRecord(anyString());
assertEquals(passingRecordJson, returnedPassingRecordJson);
}
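// a user who never attempted the quiz has no passing record; the NotFoundException from the backend is passed through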
@Test(expected = NotFoundException.class)
public void testUserNeverAttemptedCertification()
throws RestServiceException, SynapseException {
when(mockSynapse.getCertifiedUserPassingRecord(anyString())).thenThrow(
new SynapseNotFoundException("PassingRecord not found"));
synapseClient.getCertifiedUserPassingRecord("123");
}
@Test(expected = NotFoundException.class)
public void testUserFailedCertification() throws RestServiceException,
SynapseException {
PassingRecord passingRecord = new PassingRecord();
passingRecord.setPassed(false);
passingRecord.setQuizId(1238L);
when(mockSynapse.getCertifiedUserPassingRecord(anyString()))
.thenReturn(passingRecord);
synapseClient.getCertifiedUserPassingRecord("123");
}
@Test
public void testGetCertificationQuiz() throws RestServiceException,
SynapseException {
when(mockSynapse.getCertifiedUserTest()).thenReturn(new Quiz());
synapseClient.getCertificationQuiz();
verify(mockSynapse).getCertifiedUserTest();
}
@Test
public void testSubmitCertificationQuizResponse()
throws RestServiceException, SynapseException,
JSONObjectAdapterException {
PassingRecord mockPassingRecord = new PassingRecord();
when(
mockSynapse
.submitCertifiedUserTestResponse(any(QuizResponse.class)))
.thenReturn(mockPassingRecord);
QuizResponse myResponse = new QuizResponse();
myResponse.setId(837L);
synapseClient.submitCertificationQuizResponse(myResponse);
verify(mockSynapse).submitCertifiedUserTestResponse(eq(myResponse));
}
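// converted V1 wiki pages are served from the markdown cache when a cached copy exists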
@Test
public void testMarkdownCache() throws Exception {
Cache<MarkdownCacheRequest, WikiPage> mockCache = Mockito
.mock(Cache.class);
synapseClient.setMarkdownCache(mockCache);
WikiPage page = new WikiPage();
when(mockCache.get(any(MarkdownCacheRequest.class))).thenReturn(page);
Mockito.when(
mockSynapse
.getV2WikiPage(any(org.sagebionetworks.repo.model.dao.WikiPageKey.class)))
.thenReturn(v2Page);
WikiPage actualResult = synapseClient
.getV2WikiPageAsV1(new WikiPageKey(entity.getId(),
ObjectType.ENTITY.toString(), "12"));
assertEquals(page, actualResult);
verify(mockCache).get(any(MarkdownCacheRequest.class));
}
@Test
public void testMarkdownCacheWithVersion() throws Exception {
Cache<MarkdownCacheRequest, WikiPage> mockCache = Mockito
.mock(Cache.class);
synapseClient.setMarkdownCache(mockCache);
WikiPage page = new WikiPage();
when(mockCache.get(any(MarkdownCacheRequest.class))).thenReturn(page);
Mockito.when(
mockSynapse
.getVersionOfV2WikiPage(
any(org.sagebionetworks.repo.model.dao.WikiPageKey.class),
anyLong())).thenReturn(v2Page);
WikiPage actualResult = synapseClient.getVersionOfV2WikiPageAsV1(
new WikiPageKey(entity.getId(), ObjectType.ENTITY.toString(),
"12"), 5L);
assertEquals(page, actualResult);
verify(mockCache).get(any(MarkdownCacheRequest.class));
}
@Test
public void testFilterAccessRequirements() throws Exception {
List<AccessRequirement> unfilteredAccessRequirements = new ArrayList<AccessRequirement>();
List<AccessRequirement> filteredAccessRequirements;
// filter empty list should not result in failure
filteredAccessRequirements = AccessRequirementUtils
.filterAccessRequirements(unfilteredAccessRequirements,
ACCESS_TYPE.UPDATE);
assertTrue(filteredAccessRequirements.isEmpty());
unfilteredAccessRequirements
.add(createAccessRequirement(ACCESS_TYPE.DOWNLOAD));
unfilteredAccessRequirements
.add(createAccessRequirement(ACCESS_TYPE.SUBMIT));
unfilteredAccessRequirements
.add(createAccessRequirement(ACCESS_TYPE.SUBMIT));
// no requirements of type UPDATE
filteredAccessRequirements = AccessRequirementUtils
.filterAccessRequirements(unfilteredAccessRequirements,
ACCESS_TYPE.UPDATE);
assertTrue(filteredAccessRequirements.isEmpty());
// 1 download
filteredAccessRequirements = AccessRequirementUtils
.filterAccessRequirements(unfilteredAccessRequirements,
ACCESS_TYPE.DOWNLOAD);
assertEquals(1, filteredAccessRequirements.size());
// 2 submit
filteredAccessRequirements = AccessRequirementUtils
.filterAccessRequirements(unfilteredAccessRequirements,
ACCESS_TYPE.SUBMIT);
assertEquals(2, filteredAccessRequirements.size());
// finally, filter null list - result will be an empty list
filteredAccessRequirements = AccessRequirementUtils
.filterAccessRequirements(null, ACCESS_TYPE.SUBMIT);
assertNotNull(filteredAccessRequirements);
assertTrue(filteredAccessRequirements.isEmpty());
}
@Test
public void testGetEntityUnmetAccessRequirements() throws Exception {
// verify it calls getUnmetAccessRequirements when unmet is true
synapseClient.getEntityAccessRequirements(entityId, true, null);
verify(mockSynapse)
.getUnmetAccessRequirements(
any(RestrictableObjectDescriptor.class),
any(ACCESS_TYPE.class));
}
@Test
public void testGetAllEntityAccessRequirements() throws Exception {
// verify it calls getAccessRequirements when unmet is false
synapseClient.getEntityAccessRequirements(entityId, false, null);
verify(mockSynapse).getAccessRequirements(
any(RestrictableObjectDescriptor.class));
}
// pass through tests for email validation
@Test
public void testAdditionalEmailValidation() throws Exception {
Long userId = 992843L;
String emailAddress = "[email protected]";
String callbackUrl = "http://www.synapse.org/#!Account:";
synapseClient.additionalEmailValidation(userId.toString(),
emailAddress, callbackUrl);
verify(mockSynapse).additionalEmailValidation(eq(userId),
eq(emailAddress), eq(callbackUrl));
}
@Test
public void testAddEmail() throws Exception {
String emailAddressToken = "long synapse email token";
synapseClient.addEmail(emailAddressToken);
verify(mockSynapse).addEmail(any(AddEmailInfo.class), anyBoolean());
}
@Test
public void testGetNotificationEmail() throws Exception {
synapseClient.getNotificationEmail();
verify(mockSynapse).getNotificationEmail();
}
@Test
public void testSetNotificationEmail() throws Exception {
String emailAddress = "[email protected]";
synapseClient.setNotificationEmail(emailAddress);
verify(mockSynapse).setNotificationEmail(eq(emailAddress));
}
@Test
public void testLogErrorToRepositoryServices() throws SynapseException,
RestServiceException, JSONObjectAdapterException {
String errorMessage = "error has occurred";
String permutationStrongName="Chrome";
synapseClient.logErrorToRepositoryServices(errorMessage, null, null, null, permutationStrongName);
verify(mockSynapse).getMyProfile();
verify(mockSynapse).logError(any(LogEntry.class));
}
@Test
public void testLogErrorToRepositoryServicesTruncation()
throws SynapseException, RestServiceException,
JSONObjectAdapterException, ServletException {
String exceptionMessage = "This exception brought to you by Sage Bionetworks";
Exception e = new Exception(exceptionMessage, new IllegalArgumentException(new NullPointerException()));
ServletContext mockServletContext = Mockito.mock(ServletContext.class);
ServletConfig mockServletConfig = Mockito.mock(ServletConfig.class);
when(mockServletConfig.getServletContext()).thenReturn(mockServletContext);
synapseClient.init(mockServletConfig);
String errorMessage = "error has occurred";
String permutationStrongName="FF";
synapseClient.logErrorToRepositoryServices(errorMessage, e.getClass().getSimpleName(), e.getMessage(), e.getStackTrace(), permutationStrongName);
ArgumentCaptor<LogEntry> captor = ArgumentCaptor
.forClass(LogEntry.class);
verify(mockSynapse).logError(captor.capture());
LogEntry logEntry = captor.getValue();
assertTrue(logEntry.getLabel().length() < SynapseClientImpl.MAX_LOG_ENTRY_LABEL_SIZE + 100);
assertTrue(logEntry.getMessage().contains(errorMessage));
assertTrue(logEntry.getMessage().contains(MY_USER_PROFILE_OWNER_ID));
assertTrue(logEntry.getMessage().contains(e.getClass().getSimpleName()));
assertTrue(logEntry.getMessage().contains(exceptionMessage));
}
@Test
public void testGetMyProjects() throws Exception {
int limit = 11;
int offset = 20;
synapseClient.getMyProjects(ProjectListType.MY_PROJECTS, limit, offset,
ProjectListSortColumn.LAST_ACTIVITY, SortDirection.DESC);
verify(mockSynapse).getMyProjects(eq(ProjectListType.MY_PROJECTS),
eq(ProjectListSortColumn.LAST_ACTIVITY),
eq(SortDirection.DESC), eq(limit), eq(offset));
verify(mockSynapse).listUserProfiles(anyList());
}
@Test
public void testGetUserProjects() throws Exception {
int limit = 11;
int offset = 20;
Long userId = 133L;
String userIdString = userId.toString();
synapseClient.getUserProjects(userIdString, limit, offset,
ProjectListSortColumn.LAST_ACTIVITY, SortDirection.DESC);
verify(mockSynapse).getProjectsFromUser(eq(userId),
eq(ProjectListSortColumn.LAST_ACTIVITY),
eq(SortDirection.DESC), eq(limit), eq(offset));
verify(mockSynapse).listUserProfiles(anyList());
}
@Test
public void testGetProjectsForTeam() throws Exception {
int limit = 13;
int offset = 40;
Long teamId = 144L;
String teamIdString = teamId.toString();
synapseClient.getProjectsForTeam(teamIdString, limit, offset,
ProjectListSortColumn.LAST_ACTIVITY, SortDirection.DESC);
verify(mockSynapse).getProjectsForTeam(eq(teamId),
eq(ProjectListSortColumn.LAST_ACTIVITY),
eq(SortDirection.DESC), eq(limit), eq(offset));
verify(mockSynapse).listUserProfiles(anyList());
}
@Test
public void testSafeLongToInt() {
int inRangeInt = 500;
int after = SynapseClientImpl.safeLongToInt(inRangeInt);
assertEquals(inRangeInt, after);
}
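// long values just outside the int range, in either direction, must be rejected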
@Test(expected = IllegalArgumentException.class)
public void testSafeLongToIntPositive() {
long testValue = Integer.MAX_VALUE;
testValue++;
SynapseClientImpl.safeLongToInt(testValue);
}
@Test(expected = IllegalArgumentException.class)
public void testSafeLongToIntNegative() {
long testValue = Integer.MIN_VALUE;
testValue--;
SynapseClientImpl.safeLongToInt(testValue);
}
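// getHost should return only the host name, regardless of scheme casing, path, or port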
@Test
public void testGetHost() throws RestServiceException {
assertEquals("mydomain.com",
synapseClient.getHost("sfTp://mydomain.com/foo/bar"));
assertEquals("mydomain.com",
synapseClient.getHost("http://mydomain.com/foo/bar"));
assertEquals("mydomain.com",
synapseClient.getHost("http://mydomain.com"));
assertEquals("mydomain.com",
synapseClient.getHost("sftp://mydomain.com:22/foo/bar"));
}
@Test(expected = IllegalArgumentException.class)
public void testGetHostNull() throws RestServiceException {
synapseClient.getHost(null);
}
@Test(expected = IllegalArgumentException.class)
public void testGetHostEmpty() throws RestServiceException {
synapseClient.getHost("");
}
@Test(expected = BadRequestException.class)
public void testGetHostBadUrl() throws RestServiceException {
synapseClient.getHost("foobar");
}
@Test
public void testGetRootWikiId() throws JSONObjectAdapterException,
SynapseException, RestServiceException {
org.sagebionetworks.repo.model.dao.WikiPageKey key = new org.sagebionetworks.repo.model.dao.WikiPageKey();
key.setOwnerObjectId("1");
key.setOwnerObjectType(ObjectType.ENTITY);
String expectedId = "123";
key.setWikiPageId(expectedId);
when(mockSynapse.getRootWikiPageKey(anyString(), any(ObjectType.class)))
.thenReturn(key);
String actualId = synapseClient.getRootWikiId("1",
ObjectType.ENTITY.toString());
assertEquals(expectedId, actualId);
}
@Test
public void testGetFavorites() throws JSONObjectAdapterException,
SynapseException, RestServiceException {
PaginatedResults<EntityHeader> pagedResults = new PaginatedResults<EntityHeader>();
List<EntityHeader> unsortedResults = new ArrayList<EntityHeader>();
pagedResults.setResults(unsortedResults);
when(mockSynapse.getFavorites(anyInt(), anyInt())).thenReturn(
pagedResults);
// test empty favorites
List<EntityHeader> actualList = synapseClient.getFavorites();
assertTrue(actualList.isEmpty());
// test a few unsorted favorites
EntityHeader favZ = new EntityHeader();
favZ.setName("Z");
unsortedResults.add(favZ);
EntityHeader favA = new EntityHeader();
favA.setName("A");
unsortedResults.add(favA);
EntityHeader favQ = new EntityHeader();
favQ.setName("q");
unsortedResults.add(favQ);
actualList = synapseClient.getFavorites();
assertEquals(3, actualList.size());
assertEquals(favA, actualList.get(0));
assertEquals(favQ, actualList.get(1));
assertEquals(favZ, actualList.get(2));
}
@Test
public void testGetTeamBundlesNotOwner() throws RestServiceException, SynapseException {
// the paginated results were set up to return {teamZ, teamA}, but
// servlet side we sort by name.
List<TeamRequestBundle> results = synapseClient.getTeamsForUser("abba", false);
verify(mockSynapse).getTeamsForUser(eq("abba"), anyInt(), anyInt());
assertEquals(2, results.size());
assertEquals(teamA, results.get(0).getTeam());
assertEquals(teamZ, results.get(1).getTeam());
verify(mockSynapse, Mockito.never()).getOpenMembershipRequests(anyString(), anyString(),
anyLong(), anyLong());
}
@Test
public void testGetTeamBundlesOwner() throws RestServiceException, SynapseException {
TeamMember testTeamMember = new TeamMember();
testTeamMember.setIsAdmin(true);
when(mockSynapse.getTeamMember(anyString(), anyString())).thenReturn(
testTeamMember);
when(mockSynapse.getOpenMembershipRequests(anyString(), anyString(), anyLong(), anyLong())).thenReturn(mockPaginatedMembershipRequest);
List<TeamRequestBundle> results = synapseClient.getTeamsForUser("abba", true);
verify(mockSynapse).getTeamsForUser(eq("abba"), anyInt(), anyInt());
assertEquals(2, results.size());
assertEquals(teamA, results.get(0).getTeam());
assertEquals(teamZ, results.get(1).getTeam());
Long reqCount1 = results.get(0).getRequestCount();
Long reqCount2 = results.get(1).getRequestCount();
assertEquals(new Long(3L), reqCount1);
assertEquals(new Long(3L), reqCount2);
}
@Test
public void testGetEntityInfo() throws RestServiceException,
JSONObjectAdapterException, SynapseException{
EntityBundlePlus entityBundlePlus = synapseClient.getEntityInfo(entityId);
assertEquals(entity, entityBundlePlus.getEntityBundle().getEntity());
assertEquals(annos, entityBundlePlus.getEntityBundle().getAnnotations());
assertEquals(testUserProfile, entityBundlePlus.getProfile());
}
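// hexDecodeAndSerialize must reject null, empty, and unrecognized token type names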
@Test(expected = BadRequestException.class)
public void testHandleSignedTokenNull() throws RestServiceException, SynapseException{
String tokenTypeName = null;
synapseClient.hexDecodeAndSerialize(tokenTypeName, encodedJoinTeamToken);
}
@Test(expected = BadRequestException.class)
public void testHandleSignedTokenEmpty() throws RestServiceException, SynapseException{
String tokenTypeName = "";
synapseClient.hexDecodeAndSerialize(tokenTypeName, encodedJoinTeamToken);
}
@Test(expected = BadRequestException.class)
public void testHandleSignedTokenUnrecognized() throws RestServiceException, SynapseException{
String tokenTypeName = "InvalidTokenType";
synapseClient.hexDecodeAndSerialize(tokenTypeName, encodedJoinTeamToken);
}
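// a valid JoinTeam token results in an addTeamMember call with the team page and notification settings endpoints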
@Test
public void testHandleSignedTokenJoinTeam() throws RestServiceException, SynapseException{
String tokenTypeName = NotificationTokenType.JoinTeam.name();
SignedTokenInterface token = synapseClient.hexDecodeAndSerialize(tokenTypeName, encodedJoinTeamToken);
synapseClient.handleSignedToken(token,TEST_HOME_PAGE_BASE);
verify(mockSynapse).addTeamMember(joinTeamToken, TEST_HOME_PAGE_BASE+"#!Team:", TEST_HOME_PAGE_BASE+"#!SignedToken:Settings/");
}
@Test(expected = BadRequestException.class)
public void testHandleSignedTokenInvalidJoinTeam() throws RestServiceException, SynapseException{
String tokenTypeName = NotificationTokenType.JoinTeam.name();
synapseClient.hexDecodeAndSerialize(tokenTypeName, "invalid token");
}
@Test(expected = BadRequestException.class)
public void testHandleSignedTokenJoinTeamWrongToken() throws RestServiceException, SynapseException{
String tokenTypeName = NotificationTokenType.JoinTeam.name();
synapseClient.hexDecodeAndSerialize(tokenTypeName, encodedNotificationSettingsToken);
}
@Test
public void testHandleSignedTokenNotificationSettings() throws RestServiceException, SynapseException{
String tokenTypeName = NotificationTokenType.Settings.name();
SignedTokenInterface token = synapseClient.hexDecodeAndSerialize(tokenTypeName, encodedNotificationSettingsToken);
synapseClient.handleSignedToken(token, TEST_HOME_PAGE_BASE);
verify(mockSynapse).updateNotificationSettings(notificationSettingsToken);
}
@Test(expected = BadRequestException.class)
public void testHandleSignedTokenInvalidNotificationSettings() throws RestServiceException, SynapseException{
String tokenTypeName = NotificationTokenType.Settings.name();
synapseClient.hexDecodeAndSerialize(tokenTypeName, "invalid token");
}
@Test(expected = BadRequestException.class)
public void testHandleSignedTokenNotificationSettingsWrongToken() throws RestServiceException, SynapseException{
String tokenTypeName = NotificationTokenType.Settings.name();
synapseClient.hexDecodeAndSerialize(tokenTypeName, encodedJoinTeamToken);
}
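// if an activity already exists for the entity version, it is returned and no new activity is created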
@Test
public void testGetOrCreateActivityForEntityVersionGet() throws SynapseException, RestServiceException {
when(mockSynapse.getActivityForEntityVersion(anyString(), anyLong())).thenReturn(new Activity());
synapseClient.getOrCreateActivityForEntityVersion(entityId, version);
verify(mockSynapse).getActivityForEntityVersion(entityId, version);
}
@Test
public void testGetOrCreateActivityForEntityVersionCreate() throws SynapseException, RestServiceException {
when(mockSynapse.getActivityForEntityVersion(anyString(), anyLong())).thenThrow(new SynapseNotFoundException());
when(mockSynapse.createActivity(any(Activity.class))).thenReturn(mockActivity);
synapseClient.getOrCreateActivityForEntityVersion(entityId, version);
verify(mockSynapse).getActivityForEntityVersion(entityId, version);
verify(mockSynapse).createActivity(any(Activity.class));
verify(mockSynapse).putEntity(mockSynapse.getEntityById(entityId), mockActivity.getId());
}
@Test(expected = Exception.class)
public void testGetOrCreateActivityForEntityVersionFailure() throws SynapseException, RestServiceException {
when(mockSynapse.getActivityForEntityVersion(anyString(), anyLong())).thenThrow(new Exception());
synapseClient.getOrCreateActivityForEntityVersion(entityId, version);
}
@Test
public void testGetMyLocationSettingBanners() throws SynapseException, RestServiceException {
List<StorageLocationSetting> existingStorageLocations = new ArrayList<StorageLocationSetting>();
StorageLocationSetting storageLocation = new ExternalS3StorageLocationSetting();
storageLocation.setBanner("Banner 1");
existingStorageLocations.add(storageLocation);
storageLocation = new ExternalStorageLocationSetting();
storageLocation.setBanner("Another Banner");
existingStorageLocations.add(storageLocation);
storageLocation = new ExternalStorageLocationSetting();
storageLocation.setBanner("Banner 1");
existingStorageLocations.add(storageLocation);
when(mockSynapse.getMyStorageLocationSettings()).thenReturn(existingStorageLocations);
List<String> banners = synapseClient.getMyLocationSettingBanners();
verify(mockSynapse).getMyStorageLocationSettings();
//should be 2 (only returns unique values)
assertEquals(2, banners.size());
//and alphabetically sorted
assertEquals(Arrays.asList("Another Banner", "Banner 1"), banners);
}
@Test(expected = Exception.class)
public void testGetMyLocationSettingBannersFailure() throws SynapseException, RestServiceException {
when(mockSynapse.getMyStorageLocationSettings()).thenThrow(new Exception());
synapseClient.getMyLocationSettingBanners();
}
@Test
public void testGetStorageLocationSettingNullSetting() throws SynapseException, RestServiceException {
when(mockSynapse.getProjectSetting(entityId, ProjectSettingsType.upload)).thenReturn(null);
assertNull(synapseClient.getStorageLocationSetting(entityId));
}
@Test
public void testGetStorageLocationSettingEmptyLocations() throws SynapseException, RestServiceException {
UploadDestinationListSetting setting = new UploadDestinationListSetting();
setting.setLocations(Collections.<Long> emptyList());
when(mockSynapse.getProjectSetting(entityId, ProjectSettingsType.upload)).thenReturn(setting);
assertNull(synapseClient.getStorageLocationSetting(entityId));
}
@Test
public void testGetStorageLocationSetting() throws SynapseException, RestServiceException {
UploadDestinationListSetting setting = new UploadDestinationListSetting();
setting.setLocations(Collections.singletonList(42L));
when(mockSynapse.getProjectSetting(entityId, ProjectSettingsType.upload)).thenReturn(setting);
StorageLocationSetting mockStorageLocationSetting = Mockito.mock(StorageLocationSetting.class);
when(mockSynapse.getMyStorageLocationSetting(anyLong())).thenReturn(mockStorageLocationSetting);
assertEquals(mockStorageLocationSetting, synapseClient.getStorageLocationSetting(entityId));
}
@Test(expected = Exception.class)
public void testGetStorageLocationSettingFailure() throws SynapseException, RestServiceException {
when(mockSynapse.getMyStorageLocationSetting(anyLong())).thenThrow(new Exception());
synapseClient.getStorageLocationSetting(entityId);
}
}
| SWC-1954: tests for creatingStorageLocationSetting
| src/test/java/org/sagebionetworks/web/unitserver/SynapseClientImplTest.java | SWC-1954: tests for creatingStorageLocationSetting | <ide><path>src/test/java/org/sagebionetworks/web/unitserver/SynapseClientImplTest.java
<ide> import org.sagebionetworks.repo.model.principal.AddEmailInfo;
<ide> import org.sagebionetworks.repo.model.project.ExternalS3StorageLocationSetting;
<ide> import org.sagebionetworks.repo.model.project.ExternalStorageLocationSetting;
<add>import org.sagebionetworks.repo.model.project.ProjectSetting;
<ide> import org.sagebionetworks.repo.model.project.ProjectSettingsType;
<ide> import org.sagebionetworks.repo.model.project.StorageLocationSetting;
<ide> import org.sagebionetworks.repo.model.project.UploadDestinationListSetting;
<ide> synapseClient.getOrCreateActivityForEntityVersion(entityId, version);
<ide> }
<ide>
<del>
<del> @Test
<del> public void testGetMyLocationSettingBanners() throws SynapseException, RestServiceException {
<add> private void setupGetMyLocationSettings() throws SynapseException, RestServiceException{
<ide> List<StorageLocationSetting> existingStorageLocations = new ArrayList<StorageLocationSetting>();
<ide> StorageLocationSetting storageLocation = new ExternalS3StorageLocationSetting();
<add> storageLocation.setStorageLocationId(1L);
<ide> storageLocation.setBanner("Banner 1");
<ide> existingStorageLocations.add(storageLocation);
<ide>
<ide> storageLocation = new ExternalStorageLocationSetting();
<add> storageLocation.setStorageLocationId(2L);
<ide> storageLocation.setBanner("Another Banner");
<add> ((ExternalStorageLocationSetting)storageLocation).setUrl("sftp://www.jayhodgson.com");
<ide> existingStorageLocations.add(storageLocation);
<ide>
<ide> storageLocation = new ExternalStorageLocationSetting();
<add> storageLocation.setStorageLocationId(3L);
<ide> storageLocation.setBanner("Banner 1");
<ide> existingStorageLocations.add(storageLocation);
<ide>
<ide> when(mockSynapse.getMyStorageLocationSettings()).thenReturn(existingStorageLocations);
<add> }
<add>
<add> @Test
<add> public void testGetMyLocationSettingBanners() throws SynapseException, RestServiceException {
<add> setupGetMyLocationSettings();
<ide> List<String> banners = synapseClient.getMyLocationSettingBanners();
<ide> verify(mockSynapse).getMyStorageLocationSettings();
<ide> //should be 2 (only returns unique values)
<ide> synapseClient.getStorageLocationSetting(entityId);
<ide> }
<ide>
<add> @Test
<add> public void testCreateStorageLocationSettingFoundStorageAndProjectSetting() throws SynapseException, RestServiceException {
<add> setupGetMyLocationSettings();
<add>
<add> UploadDestinationListSetting projectSetting = new UploadDestinationListSetting();
<add> projectSetting.setLocations(Collections.EMPTY_LIST);
<add> when(mockSynapse.getProjectSetting(entityId, ProjectSettingsType.upload)).thenReturn(projectSetting);
<add>
<add> //test the case when it finds a duplicate storage location.
<add> ExternalStorageLocationSetting setting = new ExternalStorageLocationSetting();
<add> setting.setBanner("Another Banner");
<add> setting.setUrl("sftp://www.jayhodgson.com");
<add>
<add> synapseClient.createStorageLocationSetting(entityId, setting);
<add> //should have found the duplicate storage location, so this is never called
<add> verify(mockSynapse, Mockito.never()).createStorageLocationSetting(any(StorageLocationSetting.class));
<add> //verify updates project setting, and the new location list is a single value (id of existing storage location)
<add> ArgumentCaptor<ProjectSetting> captor = ArgumentCaptor.forClass(ProjectSetting.class);
<add> verify(mockSynapse).updateProjectSetting(captor.capture());
<add> UploadDestinationListSetting updatedProjectSetting = (UploadDestinationListSetting)captor.getValue();
<add> List<Long> locations = updatedProjectSetting.getLocations();
<add> assertEquals(new Long(2), locations.get(0));
<add> }
<add>
<add>
<add> @Test
<add> public void testCreateStorageLocationSettingNewStorageAndProjectSetting() throws SynapseException, RestServiceException {
<add> setupGetMyLocationSettings();
<add> when(mockSynapse.getProjectSetting(entityId, ProjectSettingsType.upload)).thenReturn(null);
<add>
<add> //test the case when it does not find duplicate storage location setting.
<add> ExternalStorageLocationSetting setting = new ExternalStorageLocationSetting();
<add> setting.setBanner("Another Banner");
<add> setting.setUrl("sftp://www.google.com");
<add>
<add> Long newStorageLocationId = 1007L;
<add> ExternalStorageLocationSetting createdSetting = new ExternalStorageLocationSetting();
<add> createdSetting.setStorageLocationId(newStorageLocationId);
<add>
<add> when(mockSynapse.createStorageLocationSetting(any(StorageLocationSetting.class))).thenReturn(createdSetting);
<add>
<add> synapseClient.createStorageLocationSetting(entityId, setting);
<add> //should not have found a duplicate storage location, so this should be called
<add> verify(mockSynapse).createStorageLocationSetting(any(StorageLocationSetting.class));
<add> //verify creates new project setting, and the new location list is a single value (id of the new storage location)
<add> ArgumentCaptor<ProjectSetting> captor = ArgumentCaptor.forClass(ProjectSetting.class);
<add> verify(mockSynapse).createProjectSetting(captor.capture());
<add> UploadDestinationListSetting updatedProjectSetting = (UploadDestinationListSetting)captor.getValue();
<add> List<Long> locations = updatedProjectSetting.getLocations();
<add> assertEquals(newStorageLocationId, locations.get(0));
<add> assertEquals(ProjectSettingsType.upload, updatedProjectSetting.getSettingsType());
<add> assertEquals(entityId, updatedProjectSetting.getProjectId());
<add> }
<add>
<ide> } |
|
Java | mit | fe3b4499459f55f7c60cd67eb4fc2953f099fe44 | 0 | ghostcity/gematik-specifications,eHealthExperts/gematik-specifications | package de.ehex.foss.gematik.specifications;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2234;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2238;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2239;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2241;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2242;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2244;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2245;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2247;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2250;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2251;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2252;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2254;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2255;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2257;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2260;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2261;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2265;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2268;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_2730;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_2769;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_2781;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_2805;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_2820;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_3202;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_3212;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4121;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4122;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4126;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4127;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4132;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4133;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4149;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4157;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4158;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4911;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4912;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4913;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4914;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4915;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4916;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4918;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4919;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4920;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4929;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4932;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_5052;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_5060;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_5061;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_5062;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2797;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2799;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2800;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2802;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2803;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2812;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2813;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2814;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2815;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2824;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2825;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2826;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2827;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2828;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2829;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2830;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2831;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2832;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2845;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2846;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2853;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2854;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2858;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2862;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2912;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2913;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2914;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2915;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2916;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2917;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2918;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2919;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2920;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2921;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2922;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2923;
import static de.ehex.foss.gematik.specifications.gemRL_Betr_TI.AFOs.*;
import static de.ehex.foss.gematik.specifications.gemRL_TSL_SP_CP.AFOs.*;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2095;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2096;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2097;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2098;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2099;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2100;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2101;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2102;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2103;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2104;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2106;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2107;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2108;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2109;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2111;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2112;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2114;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2115;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2116;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2117;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2118;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2119;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2121;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2122;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2123;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2124;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2125;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2126;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2127;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2128;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2129;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2003;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2004;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2005;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2006;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2007;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2008;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2009;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2010;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2011;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2012;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2013;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2014;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2015;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2016;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2017;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2018;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2019;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2020;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2021;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2022;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2023;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2024;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2025;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2026;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2027;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2028;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2029;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2030;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2031;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2032;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2033;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2034;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2035;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2036;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2037;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2038;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2039;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2040;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2041;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2042;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2043;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2044;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2045;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2046;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2047;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2048;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2049;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2050;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2052;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2057;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2058;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2059;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2060;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2061;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2062;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2063;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2064;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2065;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2066;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2067;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2069;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2070;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2071;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2072;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2074;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2075;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2076;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2077;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2079;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2080;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2081;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2082;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2083;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2084;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2085;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2086;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2087;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2088;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2089;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2090;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2091;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2092;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2094;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2176;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2177;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2178;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2179;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2180;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2181;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2182;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2184;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2190;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2191;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2192;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2193;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2225;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2230;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4435;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4436;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4437;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4448;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4449;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4450;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4451;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4453;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4455;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4456;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4457;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4458;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4459;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4460;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4461;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4462;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4463;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4464;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4465;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4466;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4467;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4468;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4470;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4471;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4472;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4473;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4474;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4475;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4476;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4477;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4478;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4479;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4480;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4481;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4482;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2130;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2131;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2132;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2133;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2134;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2135;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2136;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2137;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2138;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2139;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2140;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2141;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2142;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2143;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2144;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2145;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2146;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2147;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2148;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2149;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2150;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2151;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2152;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2154;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2155;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2156;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2157;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2158;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2159;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2160;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2161;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2162;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2163;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2164;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2165;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2166;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2167;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2168;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2169;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2171;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2185;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2186;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2187;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2188;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2189;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2223;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2224;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2231;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2277;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2278;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4503;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4504;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4505;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4506;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4507;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4508;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4509;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4510;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4511;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4512;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4513;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4514;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4515;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4516;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4517;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4518;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4519;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4520;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4521;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4522;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4523;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4524;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4525;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4526;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4527;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4528;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4529;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4530;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4531;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4532;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4533;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4534;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4535;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4537;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4538;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4539;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4540;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2348;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2349;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2350;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2351;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2353;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2356;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2357;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2358;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2359;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2547;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2548;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2549;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2550;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2669;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2673;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2704;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2706;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2707;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2712;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2747;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2748;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2761;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2940;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2942;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2943;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_3006;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_3021;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_3022;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_3023;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_3028;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_4514;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_4515;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_4518;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_4689;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_4693;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_4696;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_4812;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_5401;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_5516;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_5517;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_5518;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_5519;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_5520;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_5521;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4357;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4359;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4361;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4362;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4367;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4368;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4378;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4380;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4384;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4385;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4386;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4387;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4388;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4393;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_5035;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_5079;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_5131;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_5322;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3824;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3832;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3833;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3834;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3839;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3840;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3841;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3842;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3928;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3930;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3931;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3932;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3933;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3934;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3937;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3939;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3946;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4009;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4010;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4011;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4012;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4013;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4018;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4024;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4027;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4033;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4035;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4036;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4759;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4762;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4763;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4805;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4808;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4809;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4810;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4817;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4819;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4820;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4831;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4832;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_5089;
import static de.ehex.foss.gematik.specifications.gemSpec_OID.AFOs.GS_A_4442;
import static de.ehex.foss.gematik.specifications.gemSpec_OID.AFOs.GS_A_4444;
import static de.ehex.foss.gematik.specifications.gemSpec_OID.AFOs.GS_A_4445;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3695;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3696;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3697;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3702;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3796;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3801;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3804;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3805;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3806;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3807;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3813;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3816;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3856;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_4541;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_4543;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_4545;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_4547;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_4864;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_5018;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_5025;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_5033;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_5038;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_5039;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_5040;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_5054;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_5252;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4572;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4573;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4574;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4575;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4576;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4577;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4588;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4590;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4595;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4596;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4598;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4599;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4637;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4640;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4641;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4642;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4643;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4646;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4647;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4648;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4649;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4650;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4651;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4652;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4653;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4654;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4655;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4656;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4657;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4660;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4661;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4662;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4663;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4669;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4670;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4673;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4675;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4677;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4678;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4679;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4680;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4684;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4685;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4686;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4687;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4688;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4689;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4690;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4691;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4692;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4694;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4697;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4704;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4705;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4706;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4714;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4715;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4716;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4717;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4718;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4719;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4720;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4721;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4722;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4723;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4724;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4727;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4730;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4731;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4732;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4735;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4737;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4738;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4739;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4740;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4741;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4748;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4749;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4751;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4829;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4898;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4899;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4902;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4935;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4936;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4957;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_5077;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_5090;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_5215;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_5336;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_3055;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_3058;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_4145;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_4146;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_4147;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_4148;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_4149;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_4155;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_4159;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_4160;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5028;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5029;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5030;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5031;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5032;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5073;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5092;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5093;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5094;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5095;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5134;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5136;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5137;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5138;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5139;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5143;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2280;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2281;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2283;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2286;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2287;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2288;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2290;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2291;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2292;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2293;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2294;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2295;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2297;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2302;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2305;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2314;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2315;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2316;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2317;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2322;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2323;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2324;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2325;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2326;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2327;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2328;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2329;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2331;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2332;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2333;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2334;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2339;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2341;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2342;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2546;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2751;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2961;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2999;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_3009;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2222;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2223;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2225;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2226;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2227;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2228;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2231;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2233;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2234;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2236;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2240;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2241;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2242;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2253;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2254;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2255;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2256;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2257;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2258;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2259;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2260;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2261;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2262;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2263;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2264;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2266;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2267;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2268;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2269;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2270;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2271;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2272;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2273;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2274;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2279;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2299;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2301;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2313;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2330;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2674;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2902;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2950;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2952;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2955;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2958;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_3001;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_3002;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_3026;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_3027;
import static de.ehex.foss.gematik.specifications.gemSpec_SiBetrUmg.AFOs.GS_A_3737;
import static de.ehex.foss.gematik.specifications.gemSpec_SiBetrUmg.AFOs.GS_A_3747;
import static de.ehex.foss.gematik.specifications.gemSpec_SiBetrUmg.AFOs.GS_A_3753;
import static de.ehex.foss.gematik.specifications.gemSpec_SiBetrUmg.AFOs.GS_A_3756;
import static de.ehex.foss.gematik.specifications.gemSpec_SiBetrUmg.AFOs.GS_A_3760;
import static de.ehex.foss.gematik.specifications.gemSpec_SiBetrUmg.AFOs.GS_A_3772;
import static de.ehex.foss.gematik.specifications.gemSpec_SiBetrUmg.AFOs.GS_A_3784;
import static de.ehex.foss.gematik.specifications.gemSpec_SiBetrUmg.AFOs.GS_A_4980;
import static de.ehex.foss.gematik.specifications.gemSpec_SiBetrUmg.AFOs.GS_A_4981;
import static de.ehex.foss.gematik.specifications.gemSpec_SiBetrUmg.AFOs.GS_A_4982;
import static de.ehex.foss.gematik.specifications.gemSpec_SiBetrUmg.AFOs.GS_A_4983;
import static de.ehex.foss.gematik.specifications.gemSpec_SiBetrUmg.AFOs.GS_A_4984;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2012;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2021;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2046;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2047;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2065;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2070;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2071;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2072;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2073;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2074;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2075;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2076;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2087;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2156;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2174;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2177;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2213;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2214;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2309;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2326;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2328;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2329;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2330;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2331;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2332;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2333;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2339;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2343;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2345;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2347;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2350;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2354;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2355;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2356;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2357;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2359;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2360;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2361;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2362;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2363;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2366;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2524;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2525;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_3078;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_3125;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_3130;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_3139;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_3141;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_3149;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_4944;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_4945;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_4946;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_4947;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_5017;
import static de.ehex.foss.gematik.specifications.gemSpec_St_Ampel.AFOs.*;
import static de.ehex.foss.gematik.specifications.gemSpec_TSL.AFOs.TIP1_A_5120;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5566;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5568;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5570;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5588;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5589;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5591;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5592;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5594;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5596;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5598;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5600;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5601;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5603;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3547;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3548;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3549;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3550;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3554;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3555;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3557;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3558;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3564;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3565;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3567;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3569;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3574;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3575;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3576;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3577;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3580;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3581;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3590;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3591;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3592;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3594;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3595;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3596;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3630;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3631;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3632;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3633;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3634;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3635;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3637;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3638;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3639;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3640;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3642;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3660;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3877;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3880;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3881;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3883;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3884;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3886;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3887;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3888;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_4230;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_4231;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_4232;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_4234;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_4235;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_4427;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_4428;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_5087;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_5376;
import static de.ehex.foss.gematik.specifications.gemSpec_eGK_Fach_VSDM.AFOs.VSDM_A_2973;
import static de.ehex.foss.gematik.specifications.gemSpec_eGK_Fach_VSDM.AFOs.VSDM_A_2974;
import static de.ehex.foss.gematik.specifications.gemSpec_eGK_Fach_VSDM.AFOs.VSDM_A_2975;
import static de.ehex.foss.gematik.specifications.gemSpec_eGK_Fach_VSDM.AFOs.VSDM_A_2976;
import static de.ehex.foss.gematik.specifications.gemSpec_eGK_Fach_VSDM.AFOs.VSDM_A_2994;
import static java.util.Arrays.asList;
import static java.util.Collections.unmodifiableSet;
import static java.util.Objects.nonNull;
import static java.util.stream.Collectors.toSet;
import static java.util.stream.Stream.concat;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
 * Enumeration of all currently considered gematik {@link PTStB}s.
*
* @author Stefan Gasterstädt
* @since September 21st, 2016
*/
public enum PTStBs implements PTStB {
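    // Each constant below pairs a product test identifier with two AFO
    // lists. Judging from the static imports above (concat, toSet,
    // unmodifiableSet, nonNull), the constructor presumably merges both
    // lists into a single immutable set; the actual field and accessor
    // names are not visible in this excerpt. A minimal, purely
    // illustrative sketch of such a merge (hypothetical names only):
    //
    //     final Set<AFO> afos = unmodifiableSet(
    //             concat(testedAFOs.stream(), untestedAFOs.stream())
    //                     .filter(afo -> nonNull(afo))
    //                     .collect(toSet()));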
gemProdT_CM_KOMLE_PTV1_2_0("gemProdT_CM_KOMLE_PTV1.2.0",
asList(KOM_LE_A_2004, KOM_LE_A_2012, KOM_LE_A_2013, KOM_LE_A_2016, KOM_LE_A_2017, KOM_LE_A_2176, KOM_LE_A_2021, KOM_LE_A_2022, KOM_LE_A_2178, KOM_LE_A_2192, KOM_LE_A_2024, KOM_LE_A_2025, KOM_LE_A_2028, KOM_LE_A_2034,
KOM_LE_A_2038, KOM_LE_A_2039, KOM_LE_A_2042, KOM_LE_A_2179, KOM_LE_A_2046, KOM_LE_A_2047, KOM_LE_A_2048, KOM_LE_A_2049, KOM_LE_A_2180, KOM_LE_A_2050, KOM_LE_A_2057, KOM_LE_A_2058, KOM_LE_A_2062, KOM_LE_A_2063,
KOM_LE_A_2066, KOM_LE_A_2067, KOM_LE_A_2069, KOM_LE_A_2181, KOM_LE_A_2070, KOM_LE_A_2071, KOM_LE_A_2072, KOM_LE_A_2074, KOM_LE_A_2079, KOM_LE_A_2080, KOM_LE_A_2081, KOM_LE_A_2082, KOM_LE_A_2083, KOM_LE_A_2084,
KOM_LE_A_2085, KOM_LE_A_2086, KOM_LE_A_2087, KOM_LE_A_2088, KOM_LE_A_2089, KOM_LE_A_2090, KOM_LE_A_2091, KOM_LE_A_2184, KOM_LE_A_2092, KOM_LE_A_2225, KOM_LE_A_2230, KOM_LE_A_2094, GS_A_4359, GS_A_4385, GS_A_4386,
GS_A_5035, GS_A_5038, GS_A_5136, TIP1_A_4126, TIP1_A_4157),
            asList(GS_A_2350, GS_A_2354, GS_A_2524, GS_A_2525, GS_A_3695, GS_A_3696, GS_A_3697, GS_A_3876, GS_A_3877, GS_A_3878, GS_A_3879, GS_A_3880, GS_A_3881, GS_A_3882, GS_A_3883, GS_A_3884, GS_A_3885, GS_A_3886, GS_A_3887, GS_A_3888,
                    GS_A_3889, GS_A_3890, GS_A_3891, GS_A_3892, GS_A_3893, GS_A_3894, GS_A_3895, GS_A_3896, GS_A_3902, GS_A_3903, GS_A_3904, GS_A_3905, GS_A_3906, GS_A_3907, GS_A_3908, GS_A_3909, GS_A_3910, GS_A_3911, GS_A_3912, GS_A_3913,
GS_A_3914, GS_A_3915, GS_A_3916, GS_A_3917, GS_A_3918, GS_A_3919, GS_A_3920, GS_A_3921, GS_A_3922, GS_A_3923, GS_A_3924, GS_A_3925, GS_A_3958, GS_A_3959, GS_A_3960, GS_A_3961, GS_A_3962, GS_A_3963, GS_A_3964, GS_A_3965,
GS_A_3966, GS_A_3967, GS_A_3968, GS_A_3969, GS_A_3970, GS_A_3971, GS_A_3972, GS_A_3975, GS_A_3976, GS_A_3977, GS_A_3978, GS_A_3981, GS_A_3982, GS_A_3983, GS_A_3984, GS_A_3985, GS_A_3986, GS_A_3987, GS_A_3988, GS_A_3989,
GS_A_3990, GS_A_3991, GS_A_3992, GS_A_3993, GS_A_3994, GS_A_3995, GS_A_3996, GS_A_3997, GS_A_3998, GS_A_3999, GS_A_4000, GS_A_4085, GS_A_4086, GS_A_4087, GS_A_4088, GS_A_4089, GS_A_4090, GS_A_4091, GS_A_4092, GS_A_4093,
GS_A_4094, GS_A_4095, GS_A_4096, GS_A_4097, GS_A_4099, GS_A_4100, GS_A_4101, GS_A_4102, GS_A_4103, GS_A_4112, GS_A_4113, GS_A_4114, GS_A_4115, GS_A_4116, GS_A_4117, GS_A_4118, GS_A_4119, GS_A_4120, GS_A_4121, GS_A_4122,
GS_A_4123, GS_A_4124, GS_A_4125, GS_A_4126, GS_A_4127, GS_A_4128, GS_A_4129, GS_A_4130, GS_A_4131, GS_A_4132, GS_A_4133, GS_A_4134, GS_A_4135, GS_A_4136, GS_A_4137, GS_A_4138, GS_A_4139, GS_A_4367, GS_A_4397, GS_A_4398,
GS_A_4399, GS_A_4400, GS_A_4401, GS_A_4402, GS_A_4404, GS_A_4405, GS_A_4406, GS_A_4407, GS_A_4408, GS_A_4409, GS_A_4410, GS_A_4411, GS_A_4412, GS_A_4413, GS_A_4414, GS_A_4415, GS_A_4416, GS_A_4417, GS_A_4418, GS_A_4419,
GS_A_4420, GS_A_4421, GS_A_4422, GS_A_4424, GS_A_4425, GS_A_4541, GS_A_4831, GS_A_4855, GS_A_4892, GS_A_4893, GS_A_5039, GS_A_5086, GS_A_5200, GS_A_5248, GS_A_5249, GS_A_5251, KOM_LE_A_2003, KOM_LE_A_2005, KOM_LE_A_2006,
KOM_LE_A_2007, KOM_LE_A_2008, KOM_LE_A_2009, KOM_LE_A_2010, KOM_LE_A_2011, KOM_LE_A_2014, KOM_LE_A_2015, KOM_LE_A_2018, KOM_LE_A_2019, KOM_LE_A_2020, KOM_LE_A_2023, KOM_LE_A_2026, KOM_LE_A_2027, KOM_LE_A_2029,
KOM_LE_A_2030, KOM_LE_A_2031, KOM_LE_A_2032, KOM_LE_A_2033, KOM_LE_A_2035, KOM_LE_A_2036, KOM_LE_A_2037, KOM_LE_A_2040, KOM_LE_A_2041, KOM_LE_A_2043, KOM_LE_A_2044, KOM_LE_A_2045, KOM_LE_A_2052, KOM_LE_A_2059,
KOM_LE_A_2060, KOM_LE_A_2061, KOM_LE_A_2064, KOM_LE_A_2065, KOM_LE_A_2075, KOM_LE_A_2076, KOM_LE_A_2077, KOM_LE_A_2095, KOM_LE_A_2096, KOM_LE_A_2097, KOM_LE_A_2098, KOM_LE_A_2099, KOM_LE_A_2100, KOM_LE_A_2101,
KOM_LE_A_2102, KOM_LE_A_2103, KOM_LE_A_2104, KOM_LE_A_2106, KOM_LE_A_2107, KOM_LE_A_2108, KOM_LE_A_2109, KOM_LE_A_2111, KOM_LE_A_2112, KOM_LE_A_2114, KOM_LE_A_2115, KOM_LE_A_2116, KOM_LE_A_2117, KOM_LE_A_2118,
KOM_LE_A_2119, KOM_LE_A_2121, KOM_LE_A_2122, KOM_LE_A_2123, KOM_LE_A_2124, KOM_LE_A_2125, KOM_LE_A_2126, KOM_LE_A_2127, KOM_LE_A_2128, KOM_LE_A_2129, KOM_LE_A_2177, KOM_LE_A_2182, KOM_LE_A_2190, KOM_LE_A_2191,
KOM_LE_A_2193, KOM_LE_A_2234, KOM_LE_A_2238, KOM_LE_A_2239, KOM_LE_A_2241, KOM_LE_A_2242, KOM_LE_A_2244, KOM_LE_A_2245, KOM_LE_A_2247, KOM_LE_A_2260, KOM_LE_A_2261, KOM_LE_A_2265, KOM_LE_A_2268, TIP1_A_2730, TIP1_A_2769,
TIP1_A_2781, TIP1_A_2820, TIP1_A_3202, TIP1_A_4121, TIP1_A_4122, TIP1_A_4127, TIP1_A_4132, TIP1_A_4133, TIP1_A_4149, TIP1_A_4158, TIP1_A_4929, TIP1_A_5052)),
gemProdT_FD_KOMLE_PTV1_2_0("gemProdT_FD_KOMLE_PTV1.2.0",
asList(TIP1_A_4126, TIP1_A_4157, KOM_LE_A_2130, KOM_LE_A_2131, KOM_LE_A_2132, KOM_LE_A_2223, KOM_LE_A_2224, KOM_LE_A_2278, KOM_LE_A_2277, KOM_LE_A_2134, KOM_LE_A_2135, KOM_LE_A_2136, KOM_LE_A_2139, KOM_LE_A_2140, KOM_LE_A_2141,
KOM_LE_A_2186, KOM_LE_A_2142, KOM_LE_A_2145, KOM_LE_A_2146, KOM_LE_A_2147, KOM_LE_A_2149, KOM_LE_A_2150, KOM_LE_A_2151, KOM_LE_A_2154, KOM_LE_A_2155, KOM_LE_A_2156, KOM_LE_A_2157, KOM_LE_A_2187, KOM_LE_A_2188,
KOM_LE_A_2158, KOM_LE_A_2159, KOM_LE_A_2160, KOM_LE_A_2161, KOM_LE_A_2163, KOM_LE_A_2165, KOM_LE_A_2166, KOM_LE_A_2167, KOM_LE_A_2168, KOM_LE_A_2169, KOM_LE_A_2231, GS_A_4359, GS_A_4386, GS_A_4009, GS_A_4831, GS_A_4832,
GS_A_4013, GS_A_4024, GS_A_4033, GS_A_4035, GS_A_4762, GS_A_4036, GS_A_4763, GS_A_3932, GS_A_3842, GS_A_3931, GS_A_3832, GS_A_3833, GS_A_3840, GS_A_4817, GS_A_3934, GS_A_4819, GS_A_3937, GS_A_3695, GS_A_3696, GS_A_5025,
GS_A_3702, GS_A_4543, GS_A_4545, GS_A_3856, GS_A_4547, GS_A_5252, GS_A_3801, GS_A_3796, GS_A_3816, GS_A_3804, GS_A_3807, GS_A_3806, GS_A_5038, GS_A_4864, GS_A_4146, GS_A_4147, GS_A_4148, GS_A_4149, GS_A_5137, GS_A_5138,
GS_A_4637, GS_A_4829, GS_A_4642, GS_A_4643, GS_A_4646, GS_A_4647, GS_A_4648, GS_A_4649, GS_A_4650, GS_A_4651, GS_A_4898, GS_A_4899, GS_A_4652, GS_A_4653, GS_A_4654, GS_A_4655, GS_A_4656, GS_A_4657, GS_A_4660, GS_A_4749,
GS_A_4661, GS_A_4662, GS_A_4663, GS_A_4751, GS_A_4957, GS_A_5215, TIP1_A_5120, TIP1_A_5566, TIP1_A_5591, TIP1_A_5594, TIP1_A_5596, TIP1_A_5600, TIP1_A_5603, TIP1_A_5598),
asList(GS_A_2012, GS_A_2021, GS_A_2046, GS_A_2047, GS_A_2065, GS_A_2070, GS_A_2071, GS_A_2072, GS_A_2073, GS_A_2074, GS_A_2075, GS_A_2076, GS_A_2087, GS_A_2156, GS_A_2174, GS_A_2177, GS_A_2213, GS_A_2214, GS_A_2309, GS_A_2326,
GS_A_2328, GS_A_2329, GS_A_2330, GS_A_2331, GS_A_2332, GS_A_2333, GS_A_2339, GS_A_2343, GS_A_2345, GS_A_2347, GS_A_2355, GS_A_2356, GS_A_2357, GS_A_2359, GS_A_2360, GS_A_2361, GS_A_2362, GS_A_2363, GS_A_2366, GS_A_3078,
GS_A_3125, GS_A_3130, GS_A_3139, GS_A_3141, GS_A_3149, GS_A_3697, GS_A_3737, GS_A_3747, GS_A_3753, GS_A_3756, GS_A_3760, GS_A_3772, GS_A_3784, GS_A_3805, GS_A_3813, GS_A_3824, GS_A_3839, GS_A_3841, GS_A_3876, GS_A_3877,
GS_A_3878, GS_A_3879, GS_A_3880, GS_A_3881, GS_A_3882, GS_A_3883, GS_A_3884, GS_A_3885, GS_A_3886, GS_A_3887, GS_A_3888, GS_A_3889, GS_A_3890, GS_A_3891, GS_A_3892, GS_A_3893, GS_A_3894, GS_A_3895, GS_A_3896, GS_A_3902,
GS_A_3903, GS_A_3904, GS_A_3905, GS_A_3906, GS_A_3907, GS_A_3908, GS_A_3909, GS_A_3910, GS_A_3911, GS_A_3912, GS_A_3913, GS_A_3914, GS_A_3915, GS_A_3916, GS_A_3917, GS_A_3918, GS_A_3919, GS_A_3920, GS_A_3921, GS_A_3922,
GS_A_3923, GS_A_3924, GS_A_3925, GS_A_3958, GS_A_3959, GS_A_3960, GS_A_3961, GS_A_3962, GS_A_3963, GS_A_3964, GS_A_3965, GS_A_3966, GS_A_3967, GS_A_3968, GS_A_3969, GS_A_3970, GS_A_3971, GS_A_3972, GS_A_3975, GS_A_3976,
GS_A_3977, GS_A_3978, GS_A_3981, GS_A_3982, GS_A_3983, GS_A_3984, GS_A_3985, GS_A_3986, GS_A_3987, GS_A_3988, GS_A_3989, GS_A_3990, GS_A_3991, GS_A_3992, GS_A_3993, GS_A_3994, GS_A_3995, GS_A_3996, GS_A_3997, GS_A_3998,
GS_A_3999, GS_A_4000, GS_A_4018, GS_A_4027, GS_A_4085, GS_A_4086, GS_A_4087, GS_A_4088, GS_A_4089, GS_A_4090, GS_A_4091, GS_A_4092, GS_A_4093, GS_A_4094, GS_A_4095, GS_A_4096, GS_A_4097, GS_A_4099, GS_A_4100, GS_A_4101,
GS_A_4102, GS_A_4103, GS_A_4106, GS_A_4108, GS_A_4109, GS_A_4112, GS_A_4113, GS_A_4114, GS_A_4115, GS_A_4116, GS_A_4117, GS_A_4118, GS_A_4119, GS_A_4120, GS_A_4121, GS_A_4122, GS_A_4123, GS_A_4124, GS_A_4125, GS_A_4126,
GS_A_4127, GS_A_4128, GS_A_4129, GS_A_4130, GS_A_4131, GS_A_4132, GS_A_4133, GS_A_4134, GS_A_4135, GS_A_4136, GS_A_4137, GS_A_4138, GS_A_4139, GS_A_4330, GS_A_4367, GS_A_4368, GS_A_4384, GS_A_4385, GS_A_4387, GS_A_4388,
GS_A_4397, GS_A_4398, GS_A_4399, GS_A_4400, GS_A_4401, GS_A_4402, GS_A_4404, GS_A_4405, GS_A_4406, GS_A_4407, GS_A_4408, GS_A_4409, GS_A_4410, GS_A_4411, GS_A_4412, GS_A_4413, GS_A_4414, GS_A_4415, GS_A_4416, GS_A_4417,
GS_A_4418, GS_A_4419, GS_A_4420, GS_A_4421, GS_A_4422, GS_A_4423, GS_A_4424, GS_A_4425, GS_A_4435, GS_A_4436, GS_A_4437, GS_A_4448, GS_A_4449, GS_A_4450, GS_A_4451, GS_A_4453, GS_A_4455, GS_A_4456, GS_A_4457, GS_A_4458,
GS_A_4459, GS_A_4460, GS_A_4461, GS_A_4462, GS_A_4463, GS_A_4464, GS_A_4465, GS_A_4466, GS_A_4467, GS_A_4468, GS_A_4470, GS_A_4471, GS_A_4472, GS_A_4473, GS_A_4474, GS_A_4475, GS_A_4476, GS_A_4477, GS_A_4478, GS_A_4479,
GS_A_4480, GS_A_4481, GS_A_4482, GS_A_4503, GS_A_4504, GS_A_4505, GS_A_4506, GS_A_4507, GS_A_4508, GS_A_4509, GS_A_4510, GS_A_4511, GS_A_4512, GS_A_4513, GS_A_4514, GS_A_4515, GS_A_4516, GS_A_4517, GS_A_4518, GS_A_4519,
GS_A_4520, GS_A_4521, GS_A_4522, GS_A_4523, GS_A_4524, GS_A_4525, GS_A_4526, GS_A_4527, GS_A_4528, GS_A_4529, GS_A_4530, GS_A_4531, GS_A_4532, GS_A_4533, GS_A_4534, GS_A_4535, GS_A_4537, GS_A_4538, GS_A_4539, GS_A_4540,
GS_A_4541, GS_A_4640, GS_A_4641, GS_A_4720, GS_A_4748, GS_A_4759, GS_A_4805, GS_A_4808, GS_A_4855, GS_A_4892, GS_A_4893, GS_A_4944, GS_A_4945, GS_A_4946, GS_A_4947, GS_A_4980, GS_A_4981, GS_A_4982, GS_A_4983, GS_A_4984,
GS_A_5017, GS_A_5033, GS_A_5035, GS_A_5039, GS_A_5040, GS_A_5054, GS_A_5134, GS_A_5139, GS_A_5143, GS_A_5200, GS_A_5248, GS_A_5249, GS_A_5251, KOM_LE_A_2133, KOM_LE_A_2137, KOM_LE_A_2138, KOM_LE_A_2143, KOM_LE_A_2144,
KOM_LE_A_2148, KOM_LE_A_2152, KOM_LE_A_2162, KOM_LE_A_2164, KOM_LE_A_2171, KOM_LE_A_2185, KOM_LE_A_2189, KOM_LE_A_2234, KOM_LE_A_2238, KOM_LE_A_2250, KOM_LE_A_2251, KOM_LE_A_2252, KOM_LE_A_2254, KOM_LE_A_2255,
KOM_LE_A_2257, KOM_LE_A_2260, KOM_LE_A_2261, KOM_LE_A_2265, KOM_LE_A_2268, TIP1_A_2730, TIP1_A_2769, TIP1_A_2781, TIP1_A_2820, TIP1_A_3202, TIP1_A_4132, TIP1_A_4133, TIP1_A_4149, TIP1_A_4158, TIP1_A_4929, TIP1_A_4932,
TIP1_A_5052, TIP1_A_5568, TIP1_A_5588, TIP1_A_5589, TIP1_A_5592, TIP1_A_5601)),
gemProdT_FD_VSDM_PTV1_4_0("gemProdT_FD_VSDM_PTV1.4.0",
asList(TIP1_A_4126, TIP1_A_4157, TIP1_A_5120, GS_A_4386, GS_A_4009, GS_A_4832, GS_A_4013, GS_A_4024, GS_A_4033, GS_A_4035, GS_A_4762, GS_A_4036, GS_A_4763, GS_A_4809, GS_A_3932, GS_A_3834, GS_A_3842, GS_A_3930, GS_A_3931,
GS_A_3839, GS_A_3832, GS_A_3833, GS_A_3840, GS_A_4817, GS_A_3933, GS_A_3934, GS_A_4819, GS_A_3937, GS_A_3695, GS_A_3696, GS_A_3702, GS_A_4543, GS_A_4545, GS_A_3856, GS_A_4547, GS_A_3801, GS_A_3796, GS_A_3816, GS_A_3804,
GS_A_3807, GS_A_3806, GS_A_3805, GS_A_5025, GS_A_5031, GS_A_5092, GS_A_5093, GS_A_5094, GS_A_5095, GS_A_4637, GS_A_4829, GS_A_4642, GS_A_4643, GS_A_4646, GS_A_4647, GS_A_4648, GS_A_4649, GS_A_4650, GS_A_4651, GS_A_4898,
GS_A_4899, GS_A_4652, GS_A_4653, GS_A_4654, GS_A_4655, GS_A_4656, GS_A_4657, GS_A_4660, GS_A_4749, GS_A_4661, GS_A_4662, GS_A_4663, GS_A_4751, GS_A_4957, GS_A_5215, VSDM_A_2226, VSDM_A_2241, VSDM_A_2242, VSDM_A_2253,
VSDM_A_2254, VSDM_A_2255, VSDM_A_2256, VSDM_A_2257, VSDM_A_2258, VSDM_A_2259, VSDM_A_2260, VSDM_A_2261, VSDM_A_2262, VSDM_A_2263, VSDM_A_2266, VSDM_A_2267, VSDM_A_2269, VSDM_A_2270, VSDM_A_2272, VSDM_A_2273, VSDM_A_2274,
VSDM_A_2279, VSDM_A_2313, VSDM_A_2902, VSDM_A_2952, VSDM_A_2955, VSDM_A_3001, VSDM_A_2297, VSDM_A_2299, VSDM_A_2301, VSDM_A_2302, VSDM_A_2305, VSDM_A_2314, VSDM_A_2315, VSDM_A_2316, VSDM_A_2317, VSDM_A_2322, VSDM_A_2323,
VSDM_A_2324, VSDM_A_2325, VSDM_A_2326, VSDM_A_2327, VSDM_A_2328, VSDM_A_2330, VSDM_A_2331, VSDM_A_2332, VSDM_A_2333, VSDM_A_2334, VSDM_A_2339, VSDM_A_2264, VSDM_A_2294, VSDM_A_2341, VSDM_A_2546, VSDM_A_2961, VSDM_A_2973,
VSDM_A_2974, VSDM_A_2975, VSDM_A_2976, VSDM_A_2994, VSDM_A_3009, VSDM_A_2751, VSDM_A_2329, VSDM_A_2280, VSDM_A_2281, VSDM_A_2283, VSDM_A_2286, VSDM_A_2287, VSDM_A_2288, VSDM_A_2290, VSDM_A_2291, VSDM_A_2292, VSDM_A_2293,
VSDM_A_2342, VSDM_A_2295),
asList(TIP1_A_4133, TIP1_A_4149, TIP1_A_2769, TIP1_A_2781, TIP1_A_2820, TIP1_A_3202, VSDM_A_2800, VSDM_A_2802, VSDM_A_2803, VSDM_A_2813, VSDM_A_2814, VSDM_A_2815, VSDM_A_2824, VSDM_A_2825, VSDM_A_2830, VSDM_A_2831, VSDM_A_2832,
VSDM_A_2846, VSDM_A_2853, VSDM_A_2854, VSDM_A_2858, VSDM_A_2862, VSDM_A_2958, GS_A_3824, GS_A_4018, GS_A_4027, GS_A_4759, GS_A_3839, GS_A_5089, GS_A_3695, GS_A_3696, GS_A_3697, GS_A_4541, GS_A_4831, GS_A_5054, GS_A_5038,
GS_A_5039, GS_A_5040, GS_A_5018, GS_A_5032, GS_A_5095, GS_A_4640, GS_A_5033, GS_A_3805, GS_A_3813, GS_A_3928, TIP1_A_4158, VSDM_A_2268, VSDM_A_2999, VSDM_A_2827, VSDM_A_2828, VSDM_A_2812, VSDM_A_2826, VSDM_A_2829,
                    TIP1_A_4932, GS_A_4330, GS_A_4473, GS_A_4474, GS_A_4475, GS_A_4479, GS_A_4523, GS_A_4524, GS_A_4528, GS_A_4529, GS_A_4384, GS_A_4385, GS_A_4386, GS_A_4387, GS_A_5035, GS_A_3841, GS_A_4808, GS_A_4641,
GS_A_4748, GS_A_4980, GS_A_4981, GS_A_4982, GS_A_4983, GS_A_4984, GS_A_3784, GS_A_3737, GS_A_3747, GS_A_3753, GS_A_3772, GS_A_3756, GS_A_3760, GS_A_2087, GS_A_2213, GS_A_2076, GS_A_2174, GS_A_2177, GS_A_2012, GS_A_2021,
GS_A_2046, GS_A_4944, GS_A_4945, GS_A_4946, GS_A_4947, GS_A_2047, GS_A_2309, GS_A_2326, GS_A_2328, GS_A_2329, GS_A_2330, GS_A_2331, GS_A_2332, GS_A_2345, GS_A_2347, GS_A_2356, GS_A_2361, GS_A_2363, GS_A_2366, GS_A_4378,
GS_A_4380, GS_A_4368, GS_A_4085, GS_A_4086, GS_A_4087, GS_A_4088, GS_A_4089, GS_A_4892, GS_A_4090, GS_A_4091, GS_A_4092, GS_A_4093, GS_A_4095, GS_A_4096, GS_A_4097, GS_A_4099, GS_A_4094, GS_A_4100, GS_A_4101, GS_A_4102,
GS_A_4103, GS_A_4397, GS_A_4106, GS_A_4108, GS_A_4109, GS_A_4414, GS_A_4398, GS_A_4399, GS_A_4400, GS_A_4401, GS_A_4402, GS_A_4415, GS_A_4418, GS_A_4416, GS_A_4417, GS_A_4419, GS_A_4420, GS_A_4421, GS_A_4422, GS_A_4423,
GS_A_4424, GS_A_4425, GS_A_4404, GS_A_4405, GS_A_4406, GS_A_4407, GS_A_4408, GS_A_4409, GS_A_4410, GS_A_4411, GS_A_4412, GS_A_4112, GS_A_4113, GS_A_4114, GS_A_4115, GS_A_4116, GS_A_4117, GS_A_4413, GS_A_4118, GS_A_4119,
GS_A_3876, GS_A_3877, GS_A_3878, GS_A_3879, GS_A_3880, GS_A_3881, GS_A_3883, GS_A_3884, GS_A_3885, GS_A_3886, GS_A_3887, GS_A_3888, GS_A_3889, GS_A_3890, GS_A_3891, GS_A_3892, GS_A_3893, GS_A_3894, GS_A_3895, GS_A_3896,
GS_A_3902, GS_A_3903, GS_A_3904, GS_A_3905, GS_A_3906, GS_A_3907, GS_A_4120, GS_A_3908, GS_A_3909, GS_A_3910, GS_A_3911, GS_A_3912, GS_A_3913, GS_A_3914, GS_A_3915, GS_A_3916, GS_A_3917, GS_A_3918, GS_A_3882, GS_A_3919,
GS_A_3920, GS_A_3921, GS_A_3922, GS_A_3923, GS_A_3924, GS_A_3925, GS_A_3958, GS_A_3959, GS_A_3960, GS_A_3961, GS_A_3962, GS_A_3963, GS_A_3964, GS_A_3965, GS_A_3966, GS_A_3967, GS_A_3968, GS_A_3969, GS_A_3970, GS_A_3971,
GS_A_3972, GS_A_3975, GS_A_3976, GS_A_3977, GS_A_3978, GS_A_3981, GS_A_3982, GS_A_3983, GS_A_3984, GS_A_3985, GS_A_3986, GS_A_3987, GS_A_3988, GS_A_3989, GS_A_3990, GS_A_3991, GS_A_3992, GS_A_3993, GS_A_3994, GS_A_4000,
GS_A_3995, GS_A_3996, GS_A_3997, GS_A_3998, GS_A_3999, GS_A_4121, GS_A_4122, GS_A_4123, GS_A_4124, GS_A_4125, GS_A_4126, GS_A_4127, GS_A_4128, GS_A_4129, GS_A_4130, GS_A_4131, GS_A_4893, GS_A_4132, GS_A_4133, GS_A_4134,
GS_A_4135, GS_A_4136, GS_A_4137, GS_A_4138, GS_A_4139, GS_A_4805, GS_A_5200, GS_A_5248, GS_A_5249, GS_A_5250, GS_A_5251, VSDM_A_2802, VSDM_A_2912, VSDM_A_2914, VSDM_A_2913, VSDM_A_2915, VSDM_A_2916, VSDM_A_2917,
VSDM_A_2918, VSDM_A_2919, VSDM_A_2920, VSDM_A_2921, VSDM_A_2922, VSDM_A_2923, TIP1_A_2730, GS_A_4720)),
gemProdT_FD_VSDM_PTV1_5_0("gemProdT_FD_VSDM_PTV1.5.0",
asList(TIP1_A_4126, TIP1_A_4157, TIP1_A_5120, GS_A_4386, GS_A_4009, GS_A_4832, GS_A_4013, GS_A_4024, GS_A_4033, GS_A_4035, GS_A_4762, GS_A_4036, GS_A_4763, GS_A_4809, GS_A_3932, GS_A_3834, GS_A_3842, GS_A_3930, GS_A_3931,
GS_A_3839, GS_A_3832, GS_A_3833, GS_A_3840, GS_A_4817, GS_A_3933, GS_A_3934, GS_A_4819, GS_A_3937, GS_A_3695, GS_A_3696, GS_A_3702, GS_A_4543, GS_A_4545, GS_A_3856, GS_A_4547, GS_A_3801, GS_A_3796, GS_A_3816, GS_A_3804,
GS_A_3807, GS_A_3806, GS_A_3805, GS_A_5025, GS_A_5031, GS_A_5092, GS_A_5093, GS_A_5094, GS_A_5095, GS_A_4637, GS_A_4829, GS_A_4642, GS_A_4643, GS_A_4646, GS_A_4647, GS_A_4648, GS_A_4649, GS_A_4650, GS_A_4651, GS_A_4898,
GS_A_4899, GS_A_4652, GS_A_4653, GS_A_4654, GS_A_4655, GS_A_4656, GS_A_4657, GS_A_4660, GS_A_4749, GS_A_4661, GS_A_4662, GS_A_4663, GS_A_4751, GS_A_4957, GS_A_5077, GS_A_5215, GS_A_5336, VSDM_A_2226, VSDM_A_2241,
VSDM_A_2242, VSDM_A_2253, VSDM_A_2254, VSDM_A_2255, VSDM_A_2256, VSDM_A_2257, VSDM_A_2258, VSDM_A_2259, VSDM_A_2260, VSDM_A_2261, VSDM_A_2262, VSDM_A_2263, VSDM_A_2266, VSDM_A_2267, VSDM_A_2269, VSDM_A_2270, VSDM_A_2272,
VSDM_A_2273, VSDM_A_2274, VSDM_A_2279, VSDM_A_2313, VSDM_A_2902, VSDM_A_2952, VSDM_A_2955, VSDM_A_3001, VSDM_A_2297, VSDM_A_2299, VSDM_A_2301, VSDM_A_2302, VSDM_A_2305, VSDM_A_2314, VSDM_A_2315, VSDM_A_2316, VSDM_A_2317,
VSDM_A_2322, VSDM_A_2323, VSDM_A_2324, VSDM_A_2325, VSDM_A_2326, VSDM_A_2327, VSDM_A_2328, VSDM_A_2330, VSDM_A_2331, VSDM_A_2332, VSDM_A_2333, VSDM_A_2334, VSDM_A_2339, VSDM_A_2264, VSDM_A_2294, VSDM_A_2341, VSDM_A_2546,
VSDM_A_2961, VSDM_A_2973, VSDM_A_2974, VSDM_A_2975, VSDM_A_2976, VSDM_A_2994, VSDM_A_3009, VSDM_A_2751, VSDM_A_2329, VSDM_A_2280, VSDM_A_2281, VSDM_A_2283, VSDM_A_2286, VSDM_A_2287, VSDM_A_2288, VSDM_A_2290, VSDM_A_2291,
VSDM_A_2292, VSDM_A_2293, VSDM_A_2342, VSDM_A_2295),
asList(TIP1_A_4133, TIP1_A_4149, TIP1_A_2769, TIP1_A_2781, TIP1_A_2820, TIP1_A_3202, VSDM_A_2800, VSDM_A_2802, VSDM_A_2803, VSDM_A_2813, VSDM_A_2814, VSDM_A_2815, VSDM_A_2824, VSDM_A_2825, VSDM_A_2830, VSDM_A_2831, VSDM_A_2832,
VSDM_A_2846, VSDM_A_2853, VSDM_A_2854, VSDM_A_2858, VSDM_A_2862, VSDM_A_2958, GS_A_3824, GS_A_4018, GS_A_4027, GS_A_4759, GS_A_3839, GS_A_5089, GS_A_3695, GS_A_3696, GS_A_3697, GS_A_4541, GS_A_4831, GS_A_5054, GS_A_5038,
GS_A_5039, GS_A_5040, GS_A_5018, GS_A_5032, GS_A_5095, GS_A_4640, GS_A_5033, GS_A_3805, GS_A_3813, GS_A_3928, TIP1_A_4158, VSDM_A_2268, VSDM_A_2999, VSDM_A_2827, VSDM_A_2828, VSDM_A_2812, VSDM_A_2826, VSDM_A_2829,
TIP1_A_4932, GS_A_4330, GS_A_4473, GS_A_4474, GS_A_4475, GS_A_4479, GS_A_4523, GS_A_4524, GS_A_4528, GS_A_4529, GS_A_4384, GS_A_4385, GS_A_4386, GS_A_4387, GS_A_5035, GS_A_3839, GS_A_3841, GS_A_4808, GS_A_4641,
GS_A_4748, GS_A_4980, GS_A_4981, GS_A_4982, GS_A_4983, GS_A_4984, GS_A_3784, GS_A_3737, GS_A_3747, GS_A_3753, GS_A_3772, GS_A_3756, GS_A_3760, GS_A_2087, GS_A_2213, GS_A_2076, GS_A_2174, GS_A_2177, GS_A_2012, GS_A_2021,
GS_A_2046, GS_A_4944, GS_A_4945, GS_A_4946, GS_A_4947, GS_A_2047, GS_A_2309, GS_A_2326, GS_A_2328, GS_A_2329, GS_A_2330, GS_A_2331, GS_A_2332, GS_A_2345, GS_A_2347, GS_A_2356, GS_A_2361, GS_A_2363, GS_A_2366, GS_A_4378,
GS_A_4380, GS_A_4368, GS_A_4085, GS_A_4086, GS_A_4087, GS_A_4088, GS_A_4089, GS_A_4892, GS_A_4090, GS_A_4091, GS_A_4092, GS_A_4093, GS_A_4095, GS_A_4096, GS_A_4097, GS_A_4099, GS_A_4094, GS_A_4100, GS_A_4101, GS_A_4102,
GS_A_4103, GS_A_4397, GS_A_4106, GS_A_4108, GS_A_4109, GS_A_4414, GS_A_4398, GS_A_4399, GS_A_4400, GS_A_4401, GS_A_4402, GS_A_4415, GS_A_4418, GS_A_4416, GS_A_4417, GS_A_4419, GS_A_4420, GS_A_4421, GS_A_4422, GS_A_4423,
GS_A_4424, GS_A_4425, GS_A_4404, GS_A_4405, GS_A_4406, GS_A_4407, GS_A_4408, GS_A_4409, GS_A_4410, GS_A_4411, GS_A_4412, GS_A_4112, GS_A_4113, GS_A_4114, GS_A_4115, GS_A_4116, GS_A_4117, GS_A_4413, GS_A_4118, GS_A_4119,
GS_A_3876, GS_A_3877, GS_A_3878, GS_A_3879, GS_A_3880, GS_A_3881, GS_A_3883, GS_A_3884, GS_A_3885, GS_A_3886, GS_A_3887, GS_A_3888, GS_A_3889, GS_A_3890, GS_A_3891, GS_A_3892, GS_A_3893, GS_A_3894, GS_A_3895, GS_A_3896,
GS_A_3902, GS_A_3903, GS_A_3904, GS_A_3905, GS_A_3906, GS_A_3907, GS_A_4120, GS_A_3908, GS_A_3909, GS_A_3910, GS_A_3911, GS_A_3912, GS_A_3913, GS_A_3914, GS_A_3915, GS_A_3916, GS_A_3917, GS_A_3918, GS_A_3882, GS_A_3919,
GS_A_3920, GS_A_3921, GS_A_3922, GS_A_3923, GS_A_3924, GS_A_3925, GS_A_3958, GS_A_3959, GS_A_3960, GS_A_3961, GS_A_3962, GS_A_3963, GS_A_3964, GS_A_3965, GS_A_3966, GS_A_3967, GS_A_3968, GS_A_3969, GS_A_3970, GS_A_3971,
GS_A_3972, GS_A_3975, GS_A_3976, GS_A_3977, GS_A_3978, GS_A_3981, GS_A_3982, GS_A_3983, GS_A_3984, GS_A_3985, GS_A_3986, GS_A_3987, GS_A_3988, GS_A_3989, GS_A_3990, GS_A_3991, GS_A_3992, GS_A_3993, GS_A_3994, GS_A_4000,
GS_A_3995, GS_A_3996, GS_A_3997, GS_A_3998, GS_A_3999, GS_A_4121, GS_A_4122, GS_A_4123, GS_A_4124, GS_A_4125, GS_A_4126, GS_A_4127, GS_A_4128, GS_A_4129, GS_A_4130, GS_A_4131, GS_A_4893, GS_A_4132, GS_A_4133, GS_A_4134,
GS_A_4135, GS_A_4136, GS_A_4137, GS_A_4138, GS_A_4139, GS_A_4805, GS_A_5200, GS_A_5248, GS_A_5249, GS_A_5250, GS_A_5251, VSDM_A_2802, VSDM_A_2912, VSDM_A_2914, VSDM_A_2913, VSDM_A_2915, VSDM_A_2916, VSDM_A_2917,
VSDM_A_2918, VSDM_A_2919, VSDM_A_2920, VSDM_A_2921, VSDM_A_2922, VSDM_A_2923, TIP1_A_2730, GS_A_4720)),
gemProdT_FD_VSDM_PTV1_5_0_1("gemProdT_FD_VSDM_PTV1.5.0-1",
asList(TIP1_A_4126, TIP1_A_4157, TIP1_A_5120, TIP1_A_5993, TIP1_A_5996, TIP1_A_5997, TIP1_A_5998, TIP1_A_6003, GS_A_4386, GS_A_4009, GS_A_4832, GS_A_4013, GS_A_4024, GS_A_4033, GS_A_4035, GS_A_4762, GS_A_4036, GS_A_4763, GS_A_4809, GS_A_3932, GS_A_3834, GS_A_3842, GS_A_3930, GS_A_3931,
GS_A_3839, GS_A_3832, GS_A_3833, GS_A_3840, GS_A_4817, GS_A_3933, GS_A_3934, GS_A_4819, GS_A_3937, GS_A_3695, GS_A_3696, GS_A_3702, GS_A_4543, GS_A_4545, GS_A_3856, GS_A_4547, GS_A_3801, GS_A_3796, GS_A_3816, GS_A_3804,
GS_A_3807, GS_A_3806, GS_A_3805, GS_A_5025, GS_A_5031, GS_A_5092, GS_A_5093, GS_A_5094, GS_A_5095, GS_A_4637, GS_A_4829, GS_A_4642, GS_A_4643, GS_A_4646, GS_A_4647, GS_A_4648, GS_A_4649, GS_A_4650, GS_A_4651, GS_A_4898,
GS_A_4899, GS_A_4652, GS_A_4653, GS_A_4654, GS_A_4655, GS_A_4656, GS_A_4657, GS_A_4660, GS_A_4749, GS_A_4661, GS_A_4662, GS_A_4663, GS_A_4751, GS_A_4957, GS_A_5077, GS_A_5215, GS_A_5336, VSDM_A_2226, VSDM_A_2241,
VSDM_A_2242, VSDM_A_2253, VSDM_A_2254, VSDM_A_2255, VSDM_A_2256, VSDM_A_2257, VSDM_A_2258, VSDM_A_2259, VSDM_A_2260, VSDM_A_2261, VSDM_A_2262, VSDM_A_2263, VSDM_A_2266, VSDM_A_2267, VSDM_A_2269, VSDM_A_2270, VSDM_A_2272,
VSDM_A_2273, VSDM_A_2274, VSDM_A_2279, VSDM_A_2313, VSDM_A_2902, VSDM_A_2952, VSDM_A_2955, VSDM_A_3001, VSDM_A_2297, VSDM_A_2299, VSDM_A_2301, VSDM_A_2302, VSDM_A_2305, VSDM_A_2314, VSDM_A_2315, VSDM_A_2316, VSDM_A_2317,
VSDM_A_2322, VSDM_A_2323, VSDM_A_2324, VSDM_A_2325, VSDM_A_2326, VSDM_A_2327, VSDM_A_2328, VSDM_A_2330, VSDM_A_2331, VSDM_A_2332, VSDM_A_2333, VSDM_A_2334, VSDM_A_2339, VSDM_A_2264, VSDM_A_2294, VSDM_A_2341, VSDM_A_2546,
VSDM_A_2961, VSDM_A_2973, VSDM_A_2974, VSDM_A_2975, VSDM_A_2976, VSDM_A_2994, VSDM_A_3009, VSDM_A_2751, VSDM_A_2329, VSDM_A_2280, VSDM_A_2281, VSDM_A_2283, VSDM_A_2286, VSDM_A_2287, VSDM_A_2288, VSDM_A_2290, VSDM_A_2291,
VSDM_A_2292, VSDM_A_2293, VSDM_A_2342, VSDM_A_2295),
asList(TIP1_A_4133, TIP1_A_4149, TIP1_A_2769, TIP1_A_2781, TIP1_A_2820, TIP1_A_3202, VSDM_A_2800, VSDM_A_2802, VSDM_A_2803, VSDM_A_2813, VSDM_A_2814, VSDM_A_2815, VSDM_A_2824, VSDM_A_2825, VSDM_A_2830, VSDM_A_2831, VSDM_A_2832,
VSDM_A_2846, VSDM_A_2853, VSDM_A_2854, VSDM_A_2858, VSDM_A_2862, VSDM_A_2958, GS_A_3824, GS_A_4018, GS_A_4027, GS_A_4759, GS_A_3839, GS_A_5089, GS_A_3695, GS_A_3696, GS_A_3697, GS_A_4541, GS_A_4831, GS_A_5054, GS_A_5038,
GS_A_5039, GS_A_5040, GS_A_5018, GS_A_5032, GS_A_5095, GS_A_4640, GS_A_5033, GS_A_3805, GS_A_3813, GS_A_3928, TIP1_A_4158, VSDM_A_2268, VSDM_A_2999, VSDM_A_2827, VSDM_A_2828, VSDM_A_2812, VSDM_A_2826, VSDM_A_2829,
TIP1_A_4932, GS_A_4330, GS_A_4473, GS_A_4474, GS_A_4475, GS_A_4479, GS_A_4523, GS_A_4524, GS_A_4528, GS_A_4529, GS_A_4384, GS_A_4385, GS_A_4386, GS_A_4387, GS_A_5035, GS_A_3839, GS_A_3841, GS_A_4808, GS_A_4641,
GS_A_4748, GS_A_4980, GS_A_4981, GS_A_4982, GS_A_4983, GS_A_4984, GS_A_3784, GS_A_3737, GS_A_3747, GS_A_3753, GS_A_3772, GS_A_3756, GS_A_3760, GS_A_2087, GS_A_2213, GS_A_2076, GS_A_2174, GS_A_2177, GS_A_2012, GS_A_2021,
GS_A_2046, GS_A_4944, GS_A_4945, GS_A_4946, GS_A_4947, GS_A_2047, GS_A_2309, GS_A_2326, GS_A_2328, GS_A_2329, GS_A_2330, GS_A_2331, GS_A_2332, GS_A_2345, GS_A_2347, GS_A_2356, GS_A_2361, GS_A_2363, GS_A_2366, GS_A_4378,
GS_A_4380, GS_A_4368, GS_A_4085, GS_A_4086, GS_A_4087, GS_A_4088, GS_A_4089, GS_A_4892, GS_A_4090, GS_A_4091, GS_A_4092, GS_A_4093, GS_A_4095, GS_A_4096, GS_A_4097, GS_A_4099, GS_A_4094, GS_A_4100, GS_A_4101, GS_A_4102,
GS_A_4103, GS_A_4397, GS_A_4106, GS_A_4108, GS_A_4109, GS_A_4414, GS_A_4398, GS_A_4399, GS_A_4400, GS_A_4401, GS_A_4402, GS_A_4415, GS_A_4418, GS_A_4416, GS_A_4417, GS_A_4419, GS_A_4420, GS_A_4421, GS_A_4422, GS_A_4423,
GS_A_4424, GS_A_4425, GS_A_4404, GS_A_4405, GS_A_4406, GS_A_4407, GS_A_4408, GS_A_4409, GS_A_4410, GS_A_4411, GS_A_4412, GS_A_4112, GS_A_4113, GS_A_4114, GS_A_4115, GS_A_4116, GS_A_4117, GS_A_4413, GS_A_4118, GS_A_4119,
GS_A_3876, GS_A_3877, GS_A_3878, GS_A_3879, GS_A_3880, GS_A_3881, GS_A_3883, GS_A_3884, GS_A_3885, GS_A_3886, GS_A_3887, GS_A_3888, GS_A_3889, GS_A_3890, GS_A_3891, GS_A_3892, GS_A_3893, GS_A_3894, GS_A_3895, GS_A_3896,
GS_A_3902, GS_A_3903, GS_A_3904, GS_A_3905, GS_A_3906, GS_A_3907, GS_A_4120, GS_A_3908, GS_A_3909, GS_A_3910, GS_A_3911, GS_A_3912, GS_A_3913, GS_A_3914, GS_A_3915, GS_A_3916, GS_A_3917, GS_A_3918, GS_A_3882, GS_A_3919,
GS_A_3920, GS_A_3921, GS_A_3922, GS_A_3923, GS_A_3924, GS_A_3925, GS_A_3958, GS_A_3959, GS_A_3960, GS_A_3961, GS_A_3962, GS_A_3963, GS_A_3964, GS_A_3965, GS_A_3966, GS_A_3967, GS_A_3968, GS_A_3969, GS_A_3970, GS_A_3971,
GS_A_3972, GS_A_3975, GS_A_3976, GS_A_3977, GS_A_3978, GS_A_3981, GS_A_3982, GS_A_3983, GS_A_3984, GS_A_3985, GS_A_3986, GS_A_3987, GS_A_3988, GS_A_3989, GS_A_3990, GS_A_3991, GS_A_3992, GS_A_3993, GS_A_3994, GS_A_4000,
GS_A_3995, GS_A_3996, GS_A_3997, GS_A_3998, GS_A_3999, GS_A_4121, GS_A_4122, GS_A_4123, GS_A_4124, GS_A_4125, GS_A_4126, GS_A_4127, GS_A_4128, GS_A_4129, GS_A_4130, GS_A_4131, GS_A_4893, GS_A_4132, GS_A_4133, GS_A_4134,
GS_A_4135, GS_A_4136, GS_A_4137, GS_A_4138, GS_A_4139, GS_A_4805, GS_A_5200, GS_A_5248, GS_A_5249, GS_A_5250, GS_A_5251, VSDM_A_2802, VSDM_A_2912, VSDM_A_2914, VSDM_A_2913, VSDM_A_2915, VSDM_A_2916, VSDM_A_2917,
VSDM_A_2918, VSDM_A_2919, VSDM_A_2920, VSDM_A_2921, VSDM_A_2922, VSDM_A_2923, TIP1_A_2730, GS_A_4720)),
gemProdT_X_509_TSP_nonQES_eGK_PTV1_5_0_V1_0_0("gemProdT_X.509_TSP_nonQES_eGK_PTV1.5.0",
asList(GS_A_4009, GS_A_4831, GS_A_4832, GS_A_4013, GS_A_4024, GS_A_4033, GS_A_4035, GS_A_4762, GS_A_3834, GS_A_3842, GS_A_4810, GS_A_3931, GS_A_3832, GS_A_3833, GS_A_3840, GS_A_4817, GS_A_3934, GS_A_3937, GS_A_3939, GS_A_3946,
GS_A_5038, GS_A_4146, GS_A_4147, GS_A_4148, GS_A_4149, GS_A_4145, GS_A_4159, GS_A_4160, GS_A_4721, GS_A_4669, GS_A_4673, GS_A_4675, GS_A_4677, GS_A_4678, GS_A_4679, GS_A_4684, GS_A_4686, GS_A_4687, GS_A_4688, GS_A_4690,
GS_A_4691, GS_A_4692, GS_A_5090, GS_A_4694, TIP1_A_3888),
asList(TIP1_A_4121, TIP1_A_4122, TIP1_A_4126, TIP1_A_4127, TIP1_A_4132, TIP1_A_4133, TIP1_A_4157, TIP1_A_4158, TIP1_A_2730, TIP1_A_5052, TIP1_A_2769, TIP1_A_2781, TIP1_A_2820, TIP1_A_3202, TIP1_A_3212, GS_A_4177, GS_A_4178,
GS_A_4179, GS_A_4180, GS_A_4181, GS_A_4182, GS_A_4183, GS_A_4185, GS_A_4208, GS_A_4906, GS_A_4395, GS_A_4213, GS_A_4348, GS_A_4219, GS_A_4225, GS_A_4228, GS_A_4229, GS_A_4234, GS_A_4237, GS_A_4250, GS_A_4251, GS_A_4283,
GS_A_4302, GS_A_4303, GS_A_4355, GS_A_4352, GS_A_4909, GS_A_4910, GS_A_4911, GS_A_4912, GS_A_4913, GS_A_4914, GS_A_4915, GS_A_4916, GS_A_4917, GS_A_4919, GS_A_4923, GS_A_4924, GS_A_4926, GS_A_4927, GS_A_4928, GS_A_4929,
GS_A_4931, GS_A_4933, GS_A_4831, GS_A_4010, GS_A_4011, GS_A_4012, GS_A_4759, GS_A_4036, GS_A_4763, GS_A_3824, GS_A_3932, GS_A_4810, GS_A_3931, GS_A_4820, GS_A_4442, GS_A_4444, GS_A_4445, GS_A_3695, GS_A_3696, GS_A_3697,
GS_A_4541, GS_A_5025, GS_A_5039, GS_A_3702, GS_A_4543, GS_A_4545, GS_A_3813, GS_A_3804, GS_A_3807, GS_A_3805, GS_A_3806, GS_A_5033, GS_A_3055, GS_A_3058, GS_A_4705, GS_A_4706, GS_A_4572, GS_A_4573, GS_A_4575, GS_A_4576,
GS_A_4577, GS_A_4714, GS_A_4715, GS_A_4716, GS_A_4717, GS_A_4718, GS_A_4719, GS_A_4722, GS_A_4723, GS_A_4724, GS_A_4588, GS_A_4590, GS_A_4595, GS_A_4596, GS_A_4598, GS_A_4599, GS_A_4730, GS_A_4731, GS_A_4732, GS_A_4735,
GS_A_4737, GS_A_4902, GS_A_4738, GS_A_4739, GS_A_4740, GS_A_4741, GS_A_4935, GS_A_4936, GS_A_4685, GS_A_4689, TIP1_A_3555, TIP1_A_3558, TIP1_A_3574, TIP1_A_3575, TIP1_A_3576, TIP1_A_3577, TIP1_A_3591, TIP1_A_3886,
TIP1_A_3594, TIP1_A_3639, TIP1_A_3640, TIP1_A_4932, GS_A_4173, GS_A_4191, GS_A_4230, GS_A_4396, GS_A_4243, GS_A_4247, GS_A_4249, GS_A_4255, GS_A_4259, GS_A_4260, GS_A_4261, GS_A_4268, GS_A_4270, GS_A_4271, GS_A_4272,
GS_A_4273, GS_A_4274, GS_A_4275, GS_A_4276, GS_A_4279, GS_A_4284, GS_A_4285, GS_A_4287, GS_A_4288, GS_A_4289, GS_A_4290, GS_A_4291, GS_A_4292, GS_A_4294, GS_A_4295, GS_A_4304, GS_A_4305, GS_A_4306, GS_A_4307, GS_A_4308,
GS_A_4309, GS_A_4310, GS_A_4311, GS_A_4312, GS_A_4313, GS_A_4314, GS_A_4315, GS_A_4316, GS_A_4317, GS_A_4925, GS_A_4473, GS_A_4474, GS_A_4475, GS_A_4479, GS_A_4523, GS_A_4524, GS_A_4357, GS_A_4361, GS_A_4362, GS_A_4367,
GS_A_4368, GS_A_4388, GS_A_4393, GS_A_5131, GS_A_5079, GS_A_4817, GS_A_4574, GS_A_4980, GS_A_4981, GS_A_4982, GS_A_4983, GS_A_4984, GS_A_3784, GS_A_3737, GS_A_3747, GS_A_3753, GS_A_3772, GS_A_3756, GS_A_3760, GS_A_2087,
GS_A_2213, GS_A_2076, GS_A_2174, GS_A_2177, GS_A_2012, GS_A_2021, GS_A_2046, GS_A_4944, GS_A_4945, GS_A_4946, GS_A_4947, GS_A_2047, GS_A_2309, GS_A_2326, GS_A_2328, GS_A_2329, GS_A_2330, GS_A_2331, GS_A_2332, GS_A_2345,
GS_A_2347, GS_A_2356, GS_A_2361, GS_A_2363, GS_A_2366, TIP1_A_5087, TIP1_A_4230, TIP1_A_4231, TIP1_A_4232, TIP1_A_4234, TIP1_A_4235, TIP1_A_3660, TIP1_A_3548, TIP1_A_3549, TIP1_A_3550, TIP1_A_3881, TIP1_A_3554,
TIP1_A_3555, TIP1_A_3557, TIP1_A_3590, TIP1_A_3595, TIP1_A_3596, GS_A_4085, GS_A_4086, GS_A_4087, GS_A_4088, GS_A_4089, GS_A_4892, GS_A_4090, GS_A_4091, GS_A_4092, GS_A_4093, GS_A_4095, GS_A_4096, GS_A_4097, GS_A_4099,
GS_A_4094, GS_A_4855, GS_A_5200, GS_A_5248, GS_A_5249, GS_A_4100, GS_A_4101, GS_A_4102, GS_A_4103, GS_A_4397, GS_A_4106, GS_A_4108, GS_A_4109, GS_A_4414, GS_A_4398, GS_A_4399, GS_A_4400, GS_A_4401, GS_A_4402, GS_A_4415,
GS_A_4418, GS_A_4416, GS_A_4417, GS_A_4419, GS_A_4420, GS_A_4421, GS_A_4422, GS_A_4423, GS_A_4424, GS_A_4425, GS_A_4404, GS_A_4405, GS_A_4406, GS_A_4407, GS_A_4408, GS_A_4409, GS_A_4410, GS_A_4411, GS_A_4412, GS_A_4112,
GS_A_4113, GS_A_4114, GS_A_4115, GS_A_4116, GS_A_4117, GS_A_4413, GS_A_4118, GS_A_4119, GS_A_3876, GS_A_3877, GS_A_3878, GS_A_3879, GS_A_3880, GS_A_3881, GS_A_3883, GS_A_3884, GS_A_3885, GS_A_3886, GS_A_3887, GS_A_3888,
GS_A_3889, GS_A_3890, GS_A_3891, GS_A_3892, GS_A_3893, GS_A_3894, GS_A_3895, GS_A_3896, GS_A_3902, GS_A_3903, GS_A_3904, GS_A_3905, GS_A_3906, GS_A_3907, GS_A_4120, GS_A_3908, GS_A_5250, GS_A_3909, GS_A_3910, GS_A_3911,
GS_A_3912, GS_A_3913, GS_A_3914, GS_A_3915, GS_A_3916, GS_A_3917, GS_A_3918, GS_A_3882, GS_A_3919, GS_A_3920, GS_A_3921, GS_A_3922, GS_A_3923, GS_A_3924, GS_A_3925, GS_A_3958, GS_A_3959, GS_A_3960, GS_A_3961, GS_A_3962,
GS_A_3963, GS_A_3964, GS_A_3965, GS_A_3966, GS_A_3967, GS_A_3968, GS_A_3969, GS_A_3970, GS_A_3971, GS_A_3972, GS_A_3975, GS_A_3976, GS_A_3977, GS_A_3978, GS_A_3981, GS_A_3982, GS_A_3983, GS_A_3984, GS_A_3985, GS_A_3986,
GS_A_3987, GS_A_3988, GS_A_3989, GS_A_3990, GS_A_3991, GS_A_3992, GS_A_3993, GS_A_3994, GS_A_4000, GS_A_3995, GS_A_3996, GS_A_3997, GS_A_3998, GS_A_3999, GS_A_5251, GS_A_4121, GS_A_4122, GS_A_4123, GS_A_4124, GS_A_4125,
GS_A_4126, GS_A_4127, GS_A_4128, GS_A_4129, GS_A_4130, GS_A_4131, GS_A_4893, GS_A_4132, GS_A_4133, GS_A_4134, GS_A_4135, GS_A_4136, GS_A_4137, GS_A_4138, GS_A_4139, TIP1_A_5060, TIP1_A_5061, TIP1_A_5062, TIP1_A_4911,
TIP1_A_4912, TIP1_A_4913, TIP1_A_4914, TIP1_A_4915, TIP1_A_4916, TIP1_A_4918, TIP1_A_4919, TIP1_A_4920, TIP1_A_4149, TIP1_A_2805, GS_A_4173, GS_A_4174, GS_A_4175, GS_A_4176, GS_A_4186, GS_A_4187, GS_A_4188, GS_A_4189,
GS_A_4190, GS_A_4192, GS_A_4195, GS_A_4199, GS_A_4201, GS_A_4202, GS_A_5083, GS_A_4203, GS_A_4205, GS_A_4207, GS_A_4209, GS_A_4394, GS_A_4210, GS_A_4211, GS_A_4212, GS_A_4214, GS_A_4215, GS_A_4216, GS_A_4217, GS_A_4218,
GS_A_4221, GS_A_4349, GS_A_4226, GS_A_4227, GS_A_4230, GS_A_4231, GS_A_4234, GS_A_4235, GS_A_4236, GS_A_4238, GS_A_4241, GS_A_4242, GS_A_4245, GS_A_4248, GS_A_4250, GS_A_4252, GS_A_4254, GS_A_4256, GS_A_4257, GS_A_4262,
GS_A_5084, GS_A_4263, GS_A_4264, GS_A_4265, GS_A_4266, GS_A_4267, GS_A_4269, GS_A_4276, GS_A_4277, GS_A_4278, GS_A_4281, GS_A_4282, GS_A_4296, GS_A_4297, GS_A_4299, GS_A_4300, GS_A_4318, GS_A_4319, GS_A_4321, GS_A_4322,
GS_A_4323, GS_A_4324, GS_A_4325, GS_A_4326, GS_A_4327, GS_A_4328, GS_A_4332, GS_A_4908, GS_A_4925, GS_A_4927, GS_A_4930, GS_A_4018, GS_A_4027, GS_A_4805, GS_A_5018, GS_A_4149, GS_A_4155, GS_A_5028, GS_A_4697, GS_A_4704,
GS_A_4727, GS_A_4670, GS_A_4680, TIP1_A_3547, TIP1_A_3877, TIP1_A_3880, TIP1_A_4427, TIP1_A_4428, TIP1_A_3630, TIP1_A_5376, TIP1_A_3883, TIP1_A_3558, TIP1_A_3564, TIP1_A_3565, TIP1_A_3567, TIP1_A_3569, TIP1_A_3580,
TIP1_A_3581, TIP1_A_3884, TIP1_A_3592, TIP1_A_3887, TIP1_A_3596, TIP1_A_3631, TIP1_A_3632, TIP1_A_3633, TIP1_A_3634, TIP1_A_3635, TIP1_A_3637, TIP1_A_3638, TIP1_A_3642)),
gemProdT_X_509_TSP_nonQES_eGK_PTV1_6_0_V1_2_1("gemProdT_X.509_TSP_nonQES_eGK_PTV1.6.0",
asList(TIP1_A_5993, TIP1_A_5996, TIP1_A_5997, TIP1_A_5998, TIP1_A_6002, GS_A_4009, GS_A_4831, GS_A_4832, GS_A_4013, GS_A_4024, GS_A_4033, GS_A_4035, GS_A_4762, GS_A_3834, GS_A_3842, GS_A_4810, GS_A_3931, GS_A_3832, GS_A_3833, GS_A_3840, GS_A_4817, GS_A_3934, GS_A_3937, GS_A_3939, GS_A_3946,
GS_A_5038, GS_A_4146, GS_A_4147, GS_A_4148, GS_A_4149, GS_A_4145, GS_A_4159, GS_A_4160, GS_A_4721, GS_A_4669, GS_A_4673, GS_A_4675, GS_A_4677, GS_A_4678, GS_A_4679, GS_A_4684, GS_A_4686, GS_A_4687, GS_A_4688, GS_A_4690,
GS_A_4691, GS_A_4692, GS_A_5077, GS_A_5090, GS_A_4694, TIP1_A_3888),
asList(TIP1_A_4121, TIP1_A_4122, TIP1_A_4126, TIP1_A_4127, TIP1_A_4132, TIP1_A_4133, TIP1_A_4157, TIP1_A_4158, TIP1_A_2730, TIP1_A_5052, TIP1_A_2769, TIP1_A_2781, TIP1_A_2820, TIP1_A_3202, TIP1_A_3212, GS_A_4177, GS_A_4178,
GS_A_4179, GS_A_4180, GS_A_4181, GS_A_4182, GS_A_4183, GS_A_4185, GS_A_4208, GS_A_4906, GS_A_4395, GS_A_4213, GS_A_4348, GS_A_4219, GS_A_4225, GS_A_4228, GS_A_4229, GS_A_4234, GS_A_4237, GS_A_4250, GS_A_4251, GS_A_4283,
GS_A_4302, GS_A_4303, GS_A_4355, GS_A_4352, GS_A_4909, GS_A_4910, GS_A_4911, GS_A_4912, GS_A_4913, GS_A_4914, GS_A_4915, GS_A_4916, GS_A_4917, GS_A_4919, GS_A_4923, GS_A_4924, GS_A_4926, GS_A_4927, GS_A_4928, GS_A_4929,
GS_A_4931, GS_A_4933, GS_A_4831, GS_A_4010, GS_A_4011, GS_A_4012, GS_A_4759, GS_A_4036, GS_A_4763, GS_A_3824, GS_A_3932, GS_A_4810, GS_A_3931, GS_A_4820, GS_A_4442, GS_A_4444, GS_A_4445, GS_A_3695, GS_A_3696, GS_A_3697,
GS_A_4541, GS_A_5025, GS_A_5039, GS_A_3702, GS_A_4543, GS_A_4545, GS_A_3813, GS_A_3804, GS_A_3807, GS_A_3805, GS_A_3806, GS_A_5033, GS_A_3055, GS_A_3058, GS_A_4705, GS_A_4706, GS_A_4572, GS_A_4573, GS_A_4575, GS_A_4576,
GS_A_4577, GS_A_4714, GS_A_4715, GS_A_4716, GS_A_4717, GS_A_4718, GS_A_4719, GS_A_4722, GS_A_4723, GS_A_4724, GS_A_4588, GS_A_4590, GS_A_4595, GS_A_4596, GS_A_4598, GS_A_4599, GS_A_4730, GS_A_4731, GS_A_4732, GS_A_4735,
GS_A_4737, GS_A_4902, GS_A_4738, GS_A_4739, GS_A_4740, GS_A_4741, GS_A_4935, GS_A_4936, GS_A_4685, GS_A_4689, TIP1_A_3555, TIP1_A_3558, TIP1_A_3574, TIP1_A_3575, TIP1_A_3576, TIP1_A_3577, TIP1_A_3591, TIP1_A_3886,
TIP1_A_3594, TIP1_A_3639, TIP1_A_3640, TIP1_A_4932, GS_A_4173, GS_A_4191, GS_A_4230, GS_A_4396, GS_A_4243, GS_A_4247, GS_A_4249, GS_A_4255, GS_A_4259, GS_A_4260, GS_A_4261, GS_A_4268, GS_A_4270, GS_A_4271, GS_A_4272,
GS_A_4273, GS_A_4274, GS_A_4275, GS_A_4276, GS_A_4279, GS_A_4284, GS_A_4285, GS_A_4287, GS_A_4288, GS_A_4289, GS_A_4290, GS_A_4291, GS_A_4292, GS_A_4294, GS_A_4295, GS_A_4304, GS_A_4305, GS_A_4306, GS_A_4307, GS_A_4308,
GS_A_4309, GS_A_4310, GS_A_4311, GS_A_4312, GS_A_4313, GS_A_4314, GS_A_4315, GS_A_4316, GS_A_4317, GS_A_4925, GS_A_4473, GS_A_4474, GS_A_4475, GS_A_4479, GS_A_4523, GS_A_4524, GS_A_4357, GS_A_4361, GS_A_4362, GS_A_4367,
GS_A_4368, GS_A_4388, GS_A_4393, GS_A_5131, GS_A_5079, GS_A_4817, GS_A_4574, GS_A_4980, GS_A_4981, GS_A_4982, GS_A_4983, GS_A_4984, GS_A_3784, GS_A_3737, GS_A_3747, GS_A_3753, GS_A_3772, GS_A_3756, GS_A_3760, GS_A_2087,
GS_A_2213, GS_A_2076, GS_A_2174, GS_A_2177, GS_A_2012, GS_A_2021, GS_A_2046, GS_A_4944, GS_A_4945, GS_A_4946, GS_A_4947, GS_A_2047, GS_A_2309, GS_A_2326, GS_A_2328, GS_A_2329, GS_A_2330, GS_A_2331, GS_A_2332, GS_A_2345,
GS_A_2347, GS_A_2356, GS_A_2361, GS_A_2363, GS_A_2366, TIP1_A_5087, TIP1_A_4230, TIP1_A_4231, TIP1_A_4232, TIP1_A_4234, TIP1_A_4235, TIP1_A_3660, TIP1_A_3548, TIP1_A_3549, TIP1_A_3550, TIP1_A_3881, TIP1_A_3554,
TIP1_A_3555, TIP1_A_3557, TIP1_A_3590, TIP1_A_3595, TIP1_A_3596, GS_A_4085, GS_A_4086, GS_A_4087, GS_A_4088, GS_A_4089, GS_A_4892, GS_A_4090, GS_A_4091, GS_A_4092, GS_A_4093, GS_A_4095, GS_A_4096, GS_A_4097, GS_A_4099,
GS_A_4094, GS_A_4855, GS_A_5200, GS_A_5248, GS_A_5249, GS_A_4100, GS_A_4101, GS_A_4102, GS_A_4103, GS_A_4397, GS_A_4106, GS_A_4108, GS_A_4109, GS_A_4414, GS_A_4398, GS_A_4399, GS_A_4400, GS_A_4401, GS_A_4402, GS_A_4415,
GS_A_4418, GS_A_4416, GS_A_4417, GS_A_4419, GS_A_4420, GS_A_4421, GS_A_4422, GS_A_4423, GS_A_4424, GS_A_4425, GS_A_4404, GS_A_4405, GS_A_4406, GS_A_4407, GS_A_4408, GS_A_4409, GS_A_4410, GS_A_4411, GS_A_4412, GS_A_4112,
GS_A_4113, GS_A_4114, GS_A_4115, GS_A_4116, GS_A_4117, GS_A_4413, GS_A_4118, GS_A_4119, GS_A_3876, GS_A_3877, GS_A_3878, GS_A_3879, GS_A_3880, GS_A_3881, GS_A_3883, GS_A_3884, GS_A_3885, GS_A_3886, GS_A_3887, GS_A_3888,
GS_A_3889, GS_A_3890, GS_A_3891, GS_A_3892, GS_A_3893, GS_A_3894, GS_A_3895, GS_A_3896, GS_A_3902, GS_A_3903, GS_A_3904, GS_A_3905, GS_A_3906, GS_A_3907, GS_A_4120, GS_A_3908, GS_A_5250, GS_A_3909, GS_A_3910, GS_A_3911,
GS_A_3912, GS_A_3913, GS_A_3914, GS_A_3915, GS_A_3916, GS_A_3917, GS_A_3918, GS_A_3882, GS_A_3919, GS_A_3920, GS_A_3921, GS_A_3922, GS_A_3923, GS_A_3924, GS_A_3925, GS_A_3958, GS_A_3959, GS_A_3960, GS_A_3961, GS_A_3962,
GS_A_3963, GS_A_3964, GS_A_3965, GS_A_3966, GS_A_3967, GS_A_3968, GS_A_3969, GS_A_3970, GS_A_3971, GS_A_3972, GS_A_3975, GS_A_3976, GS_A_3977, GS_A_3978, GS_A_3981, GS_A_3982, GS_A_3983, GS_A_3984, GS_A_3985, GS_A_3986,
GS_A_3987, GS_A_3988, GS_A_3989, GS_A_3990, GS_A_3991, GS_A_3992, GS_A_3993, GS_A_3994, GS_A_4000, GS_A_3995, GS_A_3996, GS_A_3997, GS_A_3998, GS_A_3999, GS_A_5251, GS_A_4121, GS_A_4122, GS_A_4123, GS_A_4124, GS_A_4125,
GS_A_4126, GS_A_4127, GS_A_4128, GS_A_4129, GS_A_4130, GS_A_4131, GS_A_4893, GS_A_4132, GS_A_4133, GS_A_4134, GS_A_4135, GS_A_4136, GS_A_4137, GS_A_4138, GS_A_4139, TIP1_A_5060, TIP1_A_5061, TIP1_A_5062, TIP1_A_4911,
TIP1_A_4912, TIP1_A_4913, TIP1_A_4914, TIP1_A_4915, TIP1_A_4916, TIP1_A_4918, TIP1_A_4919, TIP1_A_4920, TIP1_A_4149, TIP1_A_2805, GS_A_4173, GS_A_4174, GS_A_4175, GS_A_4176, GS_A_4186, GS_A_4187, GS_A_4188, GS_A_4189,
GS_A_4190, GS_A_4192, GS_A_4195, GS_A_4199, GS_A_4201, GS_A_4202, GS_A_5083, GS_A_4203, GS_A_4205, GS_A_4207, GS_A_4209, GS_A_4394, GS_A_4210, GS_A_4211, GS_A_4212, GS_A_4214, GS_A_4215, GS_A_4216, GS_A_4217, GS_A_4218,
GS_A_4221, GS_A_4349, GS_A_4226, GS_A_4227, GS_A_4230, GS_A_4231, GS_A_4234, GS_A_4235, GS_A_4236, GS_A_4238, GS_A_4241, GS_A_4242, GS_A_4245, GS_A_4248, GS_A_4250, GS_A_4252, GS_A_4254, GS_A_4256, GS_A_4257, GS_A_4262,
GS_A_5084, GS_A_4263, GS_A_4264, GS_A_4265, GS_A_4266, GS_A_4267, GS_A_4269, GS_A_4276, GS_A_4277, GS_A_4278, GS_A_4281, GS_A_4282, GS_A_4296, GS_A_4297, GS_A_4299, GS_A_4300, GS_A_4318, GS_A_4319, GS_A_4321, GS_A_4322,
GS_A_4323, GS_A_4324, GS_A_4325, GS_A_4326, GS_A_4327, GS_A_4328, GS_A_4332, GS_A_4908, GS_A_4925, GS_A_4927, GS_A_4930, GS_A_4018, GS_A_4027, GS_A_4805, GS_A_5018, GS_A_4149, GS_A_4155, GS_A_5028, GS_A_4697, GS_A_4704,
GS_A_4727, GS_A_4670, GS_A_4680, TIP1_A_3547, TIP1_A_3877, TIP1_A_3880, TIP1_A_4427, TIP1_A_4428, TIP1_A_3630, TIP1_A_5376, TIP1_A_3883, TIP1_A_3558, TIP1_A_3564, TIP1_A_3565, TIP1_A_3567, TIP1_A_3569, TIP1_A_3580,
TIP1_A_3581, TIP1_A_3884, TIP1_A_3592, TIP1_A_3887, TIP1_A_3596, TIP1_A_3631, TIP1_A_3632, TIP1_A_3633, TIP1_A_3634, TIP1_A_3635, TIP1_A_3637, TIP1_A_3638, TIP1_A_3642)),
gemProdT_Intermediaer_VSDM_PTV140_V100("gemProdT_Intermediär_VSDM_PTV1.4.0",
asList(TIP1_A_4126, TIP1_A_4157, VSDM_A_2348, VSDM_A_2349, VSDM_A_2350, VSDM_A_2351, VSDM_A_2353, VSDM_A_2356, VSDM_A_2357, VSDM_A_2358, VSDM_A_2359, VSDM_A_2547, VSDM_A_2548, VSDM_A_2549, VSDM_A_2550, VSDM_A_2673, VSDM_A_2704,
VSDM_A_2706, VSDM_A_2707, VSDM_A_2712, VSDM_A_2747, VSDM_A_2761, VSDM_A_2940, VSDM_A_3022, VSDM_A_3023, GS_A_4386, GS_A_4009, GS_A_4832, GS_A_4013, GS_A_4024, GS_A_4033, GS_A_4035, GS_A_4762, GS_A_4036, GS_A_4763,
GS_A_4809, GS_A_3932, GS_A_3834, GS_A_3842, GS_A_3930, GS_A_3931, GS_A_3839, GS_A_3832, GS_A_3833, GS_A_3840, GS_A_4817, GS_A_3933, GS_A_3934, GS_A_4819, GS_A_3937, GS_A_3695, GS_A_3696, GS_A_5025, GS_A_5038, GS_A_3702,
GS_A_4543, GS_A_4545, GS_A_3856, GS_A_4547, GS_A_3801, GS_A_3796, GS_A_3816, GS_A_3804, GS_A_3807, GS_A_3806, GS_A_4146, GS_A_4147, GS_A_4148, GS_A_4149, GS_A_5029, GS_A_4637, GS_A_4829, GS_A_4642, GS_A_4643, GS_A_4646,
GS_A_4647, GS_A_4648, GS_A_4649, GS_A_4650, GS_A_4651, GS_A_4898, GS_A_4899, GS_A_4652, GS_A_4653, GS_A_4654, GS_A_4655, GS_A_4656, GS_A_4657, GS_A_4660, GS_A_4749, GS_A_4661, GS_A_4662, GS_A_4663, GS_A_4751, GS_A_4957,
GS_A_5215, VSDM_A_2222, VSDM_A_2223, VSDM_A_2225, VSDM_A_2226, VSDM_A_2227, VSDM_A_2228, VSDM_A_2231, VSDM_A_2233, VSDM_A_2234, VSDM_A_2236, VSDM_A_2240, VSDM_A_2271, VSDM_A_2674, VSDM_A_2950, VSDM_A_3002, TIP1_A_5120),
asList(TIP1_A_4133, TIP1_A_4149, TIP1_A_2769, TIP1_A_2781, TIP1_A_2820, TIP1_A_3202, VSDM_A_2797, VSDM_A_2799, VSDM_A_2824, VSDM_A_2845, VSDM_A_2747, VSDM_A_2940, VSDM_A_2942, VSDM_A_2943, VSDM_A_3021, GS_A_4388, GS_A_4388,
GS_A_4831, GS_A_4018, GS_A_4027, GS_A_4759, GS_A_3824, GS_A_3928, GS_A_3839, GS_A_5089, GS_A_3695, GS_A_3696, GS_A_3697, GS_A_4541, GS_A_5054, GS_A_5039, GS_A_5040, GS_A_3813, GS_A_3805, GS_A_5033, GS_A_4149, GS_A_3055,
GS_A_5073, GS_A_5030, GS_A_4640, TIP1_A_4932, GS_A_4330, GS_A_4435, GS_A_4436, GS_A_4437, GS_A_4448, GS_A_4449, GS_A_4450, GS_A_4451, GS_A_4453, GS_A_4455, GS_A_4456, GS_A_4457, GS_A_4458, GS_A_4459, GS_A_4460,
GS_A_4461, GS_A_4462, GS_A_4463, GS_A_4464, GS_A_4465, GS_A_4466, GS_A_4467, GS_A_4468, GS_A_4470, GS_A_4471, GS_A_4472, GS_A_4473, GS_A_4474, GS_A_4475, GS_A_4476, GS_A_4477, GS_A_4478, GS_A_4479, GS_A_4480, GS_A_4481,
GS_A_4482, VSDM_A_2669, VSDM_A_2748, GS_A_4503, GS_A_4504, GS_A_4505, GS_A_4506, GS_A_4507, GS_A_4508, GS_A_4509, GS_A_4510, GS_A_4511, GS_A_4512, GS_A_4513, GS_A_4514, GS_A_4515, GS_A_4516, GS_A_4517, GS_A_4518,
GS_A_4519, GS_A_4520, GS_A_4521, GS_A_4522, GS_A_4523, GS_A_4524, GS_A_4525, GS_A_4526, GS_A_4527, GS_A_4528, GS_A_4529, GS_A_4530, GS_A_4531, GS_A_4532, GS_A_4533, GS_A_4534, GS_A_4535, GS_A_4537, GS_A_4538, GS_A_4539,
GS_A_4540, GS_A_4368, GS_A_4384, GS_A_4385, GS_A_4386, GS_A_4387, GS_A_5322, GS_A_5035, GS_A_4388, GS_A_3839, GS_A_3841, GS_A_4808, GS_A_4641, GS_A_4748, GS_A_4980, GS_A_4981, GS_A_4982, GS_A_4983, GS_A_4984, GS_A_3784,
GS_A_3737, GS_A_3747, GS_A_3753, GS_A_3772, GS_A_3756, GS_A_3760, GS_A_2214, GS_A_2065, GS_A_2087, GS_A_2213, GS_A_2076, GS_A_2174, GS_A_2177, GS_A_2012, GS_A_2021, GS_A_2046, GS_A_4944, GS_A_4945, GS_A_4946, GS_A_4947,
GS_A_2047, GS_A_2309, GS_A_2326, GS_A_2328, GS_A_2329, GS_A_2330, GS_A_5017, GS_A_2331, GS_A_2332, GS_A_2333, GS_A_2339, GS_A_2343, GS_A_2345, GS_A_2347, GS_A_2355, GS_A_2356, GS_A_2357, GS_A_2359, GS_A_2360, GS_A_2361,
GS_A_2362, GS_A_2363, GS_A_2366, GS_A_3078, GS_A_3125, GS_A_3130, GS_A_3139, GS_A_3141, GS_A_4085, GS_A_4086, GS_A_4087, GS_A_4088, GS_A_4089, GS_A_4892, GS_A_4090, GS_A_4091, GS_A_4092, GS_A_4093, GS_A_4095, GS_A_4096,
GS_A_4097, GS_A_4099, GS_A_4094, GS_A_4855, GS_A_5200, GS_A_5248, GS_A_5249, GS_A_4100, GS_A_4101, GS_A_4102, GS_A_4103, GS_A_4397, GS_A_4106, GS_A_4108, GS_A_4109, GS_A_4414, GS_A_4398, GS_A_4399, GS_A_4400, GS_A_4401,
GS_A_4402, GS_A_4415, GS_A_4418, GS_A_4416, GS_A_4417, GS_A_4419, GS_A_4420, GS_A_4421, GS_A_4422, GS_A_4423, GS_A_4424, GS_A_4425, GS_A_4404, GS_A_4405, GS_A_4406, GS_A_4407, GS_A_4408, GS_A_4409, GS_A_4410, GS_A_4411,
GS_A_4412, GS_A_4112, GS_A_4113, GS_A_4114, GS_A_4115, GS_A_4116, GS_A_4117, GS_A_4413, GS_A_4118, GS_A_4119, GS_A_3876, GS_A_3877, GS_A_3878, GS_A_3879, GS_A_3880, GS_A_3881, GS_A_3883, GS_A_3884, GS_A_3885, GS_A_3886,
GS_A_3887, GS_A_3888, GS_A_3889, GS_A_3890, GS_A_3891, GS_A_3892, GS_A_3893, GS_A_3894, GS_A_3895, GS_A_3896, GS_A_3902, GS_A_3903, GS_A_3904, GS_A_3905, GS_A_3906, GS_A_3907, GS_A_4120, GS_A_3908, GS_A_5250, GS_A_3909,
GS_A_3910, GS_A_3911, GS_A_3912, GS_A_3913, GS_A_3914, GS_A_3915, GS_A_3916, GS_A_3917, GS_A_3918, GS_A_3882, GS_A_3919, GS_A_3920, GS_A_3921, GS_A_3922, GS_A_3923, GS_A_3924, GS_A_3925, GS_A_3958, GS_A_3959, GS_A_3960,
GS_A_3961, GS_A_3962, GS_A_3963, GS_A_3964, GS_A_3965, GS_A_3966, GS_A_3967, GS_A_3968, GS_A_3969, GS_A_3970, GS_A_3971, GS_A_3972, GS_A_3975, GS_A_3976, GS_A_3977, GS_A_3978, GS_A_3981, GS_A_3982, GS_A_3983, GS_A_3984,
GS_A_3985, GS_A_3986, GS_A_3987, GS_A_3988, GS_A_3989, GS_A_3990, GS_A_3991, GS_A_3992, GS_A_3993, GS_A_3994, GS_A_4000, GS_A_3995, GS_A_3996, GS_A_3997, GS_A_3998, GS_A_3999, GS_A_5251, GS_A_4121, GS_A_4122, GS_A_4123,
GS_A_4124, GS_A_4125, GS_A_4126, GS_A_4127, GS_A_4128, GS_A_4129, GS_A_4130, GS_A_4131, GS_A_4893, GS_A_4132, GS_A_4133, GS_A_4134, GS_A_4135, GS_A_4136, GS_A_4137, GS_A_4138, GS_A_4139, GS_A_4805, TIP1_A_2730,
VSDM_A_3006, GS_A_4864, GS_A_4720, GS_A_2070, GS_A_2071, GS_A_2072, GS_A_2073, GS_A_2074, GS_A_2075, GS_A_2156)),
@AfoHistory(documentVersion = "gemProdT_Intermediaer_VSDM_PTV150_V100", description = "From the OPB1 package - still in a state of change and not yet released. Identical to gemProdT_Intermediaer_VSDM_PTV140_V100 plus AFO GS-A_3027")
gemProdT_Intermediaer_VSDM_PTV150_V100(
"gemProdT_Intermediär_VSDM_PTV1.5.0",
asList(TIP1_A_4126, TIP1_A_4157, VSDM_A_2348, VSDM_A_2349, VSDM_A_2350, VSDM_A_2351, VSDM_A_2353, VSDM_A_2356, VSDM_A_2357, VSDM_A_2358, VSDM_A_2359, VSDM_A_2547, VSDM_A_2548, VSDM_A_2549, VSDM_A_2550, VSDM_A_2673, VSDM_A_2704,
VSDM_A_2706, VSDM_A_2707, VSDM_A_2712, VSDM_A_2747, VSDM_A_2761, VSDM_A_2940, VSDM_A_3022, VSDM_A_3023, GS_A_4386, GS_A_4009, GS_A_4832, GS_A_4013, GS_A_4024, GS_A_4033, GS_A_4035, GS_A_4762, GS_A_4036, GS_A_4763,
GS_A_4809, GS_A_3932, GS_A_3834, GS_A_3842, GS_A_3930, GS_A_3931, GS_A_3839, GS_A_3832, GS_A_3833, GS_A_3840, GS_A_4817, GS_A_3933, GS_A_3934, GS_A_4819, GS_A_3937, GS_A_3695, GS_A_3696, GS_A_5025, GS_A_5038, GS_A_3702,
GS_A_4543, GS_A_4545, GS_A_3856, GS_A_4547, GS_A_3801, GS_A_3796, GS_A_3816, GS_A_3804, GS_A_3807, GS_A_3806, GS_A_4146, GS_A_4147, GS_A_4148, GS_A_4149, GS_A_5029, GS_A_4637, GS_A_4829, GS_A_4642, GS_A_4643, GS_A_4646,
GS_A_4647, GS_A_4648, GS_A_4649, GS_A_4650, GS_A_4651, GS_A_4898, GS_A_4899, GS_A_4652, GS_A_4653, GS_A_4654, GS_A_4655, GS_A_4656, GS_A_4657, GS_A_4660, GS_A_4749, GS_A_4661, GS_A_4662, GS_A_4663, GS_A_4751, GS_A_4957,
GS_A_5077, GS_A_5131, GS_A_5215, GS_A_5336, VSDM_A_2222, VSDM_A_2223, VSDM_A_2225, VSDM_A_2226, VSDM_A_2227, VSDM_A_2228, VSDM_A_2231, VSDM_A_2233, VSDM_A_2234, VSDM_A_2236, VSDM_A_2240, VSDM_A_2271, VSDM_A_2674,
VSDM_A_2950, VSDM_A_3002, VSDM_A_3026, VSDM_A_3027, VSDM_A_3028, TIP1_A_5120),
asList(TIP1_A_4133, TIP1_A_4149, TIP1_A_2769, TIP1_A_2781, TIP1_A_2820, TIP1_A_3202, VSDM_A_2797, VSDM_A_2799, VSDM_A_2824, VSDM_A_2845, VSDM_A_2747, VSDM_A_2940, VSDM_A_2942, VSDM_A_2943, VSDM_A_3021, GS_A_4388, GS_A_4388,
GS_A_4831, GS_A_4018, GS_A_4027, GS_A_4759, GS_A_3824, GS_A_3928, GS_A_3839, GS_A_5089, GS_A_3695, GS_A_3696, GS_A_3697, GS_A_4541, GS_A_5054, GS_A_5039, GS_A_5040, GS_A_3813, GS_A_3805, GS_A_5033, GS_A_4149, GS_A_3055,
GS_A_5073, GS_A_5030, GS_A_4640, TIP1_A_4932, GS_A_4330, GS_A_4435, GS_A_4436, GS_A_4437, GS_A_4448, GS_A_4449, GS_A_4450, GS_A_4451, GS_A_4453, GS_A_4455, GS_A_4456, GS_A_4457, GS_A_4458, GS_A_4459, GS_A_4460,
GS_A_4461, GS_A_4462, GS_A_4463, GS_A_4464, GS_A_4465, GS_A_4466, GS_A_4467, GS_A_4468, GS_A_4470, GS_A_4471, GS_A_4472, GS_A_4473, GS_A_4474, GS_A_4475, GS_A_4476, GS_A_4477, GS_A_4478, GS_A_4479, GS_A_4480, GS_A_4481,
GS_A_4482, VSDM_A_2669, VSDM_A_2748, GS_A_4503, GS_A_4504, GS_A_4505, GS_A_4506, GS_A_4507, GS_A_4508, GS_A_4509, GS_A_4510, GS_A_4511, GS_A_4512, GS_A_4513, GS_A_4514, GS_A_4515, GS_A_4516, GS_A_4517, GS_A_4518,
GS_A_4519, GS_A_4520, GS_A_4521, GS_A_4522, GS_A_4523, GS_A_4524, GS_A_4525, GS_A_4526, GS_A_4527, GS_A_4528, GS_A_4529, GS_A_4530, GS_A_4531, GS_A_4532, GS_A_4533, GS_A_4534, GS_A_4535, GS_A_4537, GS_A_4538, GS_A_4539,
GS_A_4540, GS_A_4368, GS_A_4384, GS_A_4385, GS_A_4386, GS_A_4387, GS_A_5322, GS_A_5035, GS_A_4388, GS_A_3839, GS_A_3841, GS_A_4808, GS_A_4641, GS_A_4748, GS_A_4980, GS_A_4981, GS_A_4982, GS_A_4983, GS_A_4984, GS_A_3784,
GS_A_3737, GS_A_3747, GS_A_3753, GS_A_3772, GS_A_3756, GS_A_3760, GS_A_2214, GS_A_2065, GS_A_2087, GS_A_2213, GS_A_2076, GS_A_2174, GS_A_2177, GS_A_2012, GS_A_2021, GS_A_2046, GS_A_4944, GS_A_4945, GS_A_4946, GS_A_4947,
GS_A_2047, GS_A_2309, GS_A_2326, GS_A_2328, GS_A_2329, GS_A_2330, GS_A_5017, GS_A_2331, GS_A_2332, GS_A_2333, GS_A_2339, GS_A_2343, GS_A_2345, GS_A_2347, GS_A_2355, GS_A_2356, GS_A_2357, GS_A_2359, GS_A_2360, GS_A_2361,
GS_A_2362, GS_A_2363, GS_A_2366, GS_A_3078, GS_A_3125, GS_A_3130, GS_A_3139, GS_A_3141, GS_A_4085, GS_A_4086, GS_A_4087, GS_A_4088, GS_A_4089, GS_A_4892, GS_A_4090, GS_A_4091, GS_A_4092, GS_A_4093, GS_A_4095, GS_A_4096,
GS_A_4097, GS_A_4099, GS_A_4094, GS_A_4855, GS_A_5200, GS_A_5248, GS_A_5249, GS_A_4100, GS_A_4101, GS_A_4102, GS_A_4103, GS_A_4397, GS_A_4106, GS_A_4108, GS_A_4109, GS_A_4414, GS_A_4398, GS_A_4399, GS_A_4400, GS_A_4401,
GS_A_4402, GS_A_4415, GS_A_4418, GS_A_4416, GS_A_4417, GS_A_4419, GS_A_4420, GS_A_4421, GS_A_4422, GS_A_4423, GS_A_4424, GS_A_4425, GS_A_4404, GS_A_4405, GS_A_4406, GS_A_4407, GS_A_4408, GS_A_4409, GS_A_4410, GS_A_4411,
GS_A_4412, GS_A_4112, GS_A_4113, GS_A_4114, GS_A_4115, GS_A_4116, GS_A_4117, GS_A_4413, GS_A_4118, GS_A_4119, GS_A_3876, GS_A_3877, GS_A_3878, GS_A_3879, GS_A_3880, GS_A_3881, GS_A_3883, GS_A_3884, GS_A_3885, GS_A_3886,
GS_A_3887, GS_A_3888, GS_A_3889, GS_A_3890, GS_A_3891, GS_A_3892, GS_A_3893, GS_A_3894, GS_A_3895, GS_A_3896, GS_A_3902, GS_A_3903, GS_A_3904, GS_A_3905, GS_A_3906, GS_A_3907, GS_A_4120, GS_A_3908, GS_A_5250, GS_A_3909,
GS_A_3910, GS_A_3911, GS_A_3912, GS_A_3913, GS_A_3914, GS_A_3915, GS_A_3916, GS_A_3917, GS_A_3918, GS_A_3882, GS_A_3919, GS_A_3920, GS_A_3921, GS_A_3922, GS_A_3923, GS_A_3924, GS_A_3925, GS_A_3958, GS_A_3959, GS_A_3960,
GS_A_3961, GS_A_3962, GS_A_3963, GS_A_3964, GS_A_3965, GS_A_3966, GS_A_3967, GS_A_3968, GS_A_3969, GS_A_3970, GS_A_3971, GS_A_3972, GS_A_3975, GS_A_3976, GS_A_3977, GS_A_3978, GS_A_3981, GS_A_3982, GS_A_3983, GS_A_3984,
GS_A_3985, GS_A_3986, GS_A_3987, GS_A_3988, GS_A_3989, GS_A_3990, GS_A_3991, GS_A_3992, GS_A_3993, GS_A_3994, GS_A_4000, GS_A_3995, GS_A_3996, GS_A_3997, GS_A_3998, GS_A_3999, GS_A_5251, GS_A_4121, GS_A_4122, GS_A_4123,
GS_A_4124, GS_A_4125, GS_A_4126, GS_A_4127, GS_A_4128, GS_A_4129, GS_A_4130, GS_A_4131, GS_A_4893, GS_A_4132, GS_A_4133, GS_A_4134, GS_A_4135, GS_A_4136, GS_A_4137, GS_A_4138, GS_A_4139, GS_A_4805, TIP1_A_2730,
VSDM_A_3006, GS_A_4864, GS_A_4720, GS_A_2070, GS_A_2071, GS_A_2072, GS_A_2073, GS_A_2074, GS_A_2075, GS_A_2156)),
/**
* Partial definition of all AFOs of [gemProdT_Kon_PTV2.6.0] required to execute tests targeting the LDAP-Proxy.
*/
gemProdT_Kon_PTV260_V100_LDAPProxy("gemProdT_Kon_PTV2.6.0 (LDAP-Proxy)",
asList(GS_A_3695, GS_A_3804, GS_A_3856, GS_A_3931, GS_A_4386, TIP1_A_4514, TIP1_A_4515, TIP1_A_4518, TIP1_A_4689, TIP1_A_4693, TIP1_A_4696, TIP1_A_4812, TIP1_A_5401, TIP1_A_5516, TIP1_A_5517, TIP1_A_5518, TIP1_A_5519, TIP1_A_5520,
TIP1_A_5521, TIP1_A_5568, TIP1_A_5570),
asList(GS_A_3695, GS_A_3931, GS_A_4386, TIP1_A_4515, TIP1_A_4693, TIP1_A_4696, TIP1_A_4812)),
;
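/**
 * Creates a product test specification entry from the given gematik reference
 * identifier and its lists of testable and non-testable AFOs. Duplicates are
 * collapsed into sets, and overlaps between the two lists are reported on
 * {@code System.err}.
 *
 * <p>A minimal usage sketch (illustrative only; it simply exercises the public
 * accessors defined further down):
 *
 * <pre>{@code
 * PTStBs spec = PTStBs.gemProdT_FD_VSDM_PTV1_5_0;
 * System.out.println(spec.getReference());           // "gemProdT_FD_VSDM_PTV1.5.0"
 * System.out.println(spec.getTestableAFOs().size()); // number of test-relevant AFOs
 * System.out.println(spec.getAFOs().containsAll(spec.getTestableAFOs())); // true: afos is the union
 * }</pre>
 *
 * @param reference the non-null, non-empty gematik reference identifier
 * @param testAFOs the non-null list of test-relevant AFOs
 * @param nonTestAFOs the non-null list of test-irrelevant AFOs
 */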
private PTStBs(final String reference, final List<AFO> testAFOs, final List<AFO> nonTestAFOs) {
assert nonNull(reference) : "There must be a non-null gematik reference identifier!";
assert !reference.isEmpty() : "There must be a non-empty gematik reference identifier!";
assert nonNull(testAFOs) : "There must be a non-null list of (test-relevant) AFOs!";
assert nonNull(nonTestAFOs) : "There must be a non-null list of (test-irrelevant) AFOs!";
this.reference = reference;
this.testAFOs = unmodifiableSet(new HashSet<>(testAFOs));
// assert disjoint(testAFOs, nonTestAFOs);
nonTestAFOs.stream().filter(testAFOs::contains).forEach(afo -> System.err.format("Warning: %1$s lists AFO %2$s as both testable and non-testable; please check this (or ask the gematik why)!%n", this.name(), afo));
this.afos = unmodifiableSet(concat(testAFOs.stream(), nonTestAFOs.stream()).collect(toSet()));
}
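/** The official gematik reference identifier of this product test specification, e.g. "gemProdT_FD_VSDM_PTV1.5.0". */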
private final String reference;
@Override
public String getReference() {
return this.reference;
}
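/** All AFOs of this specification: the immutable union of the testable and the non-testable AFOs. */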
private final Set<AFO> afos;
@Override
public Set<AFO> getAFOs() {
return this.afos;
}
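/** The immutable subset of AFOs that is relevant for testing. */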
private final Set<AFO> testAFOs;
@Override
public Set<AFO> getTestableAFOs() {
return this.testAFOs;
}
}
| src/main/java/de/ehex/foss/gematik/specifications/PTStBs.java | package de.ehex.foss.gematik.specifications;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2234;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2238;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2239;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2241;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2242;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2244;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2245;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2247;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2250;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2251;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2252;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2254;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2255;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2257;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2260;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2261;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2265;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.KOM_LE_A_2268;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_2730;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_2769;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_2781;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_2805;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_2820;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_3202;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_3212;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4121;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4122;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4126;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4127;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4132;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4133;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4149;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4157;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4158;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4911;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4912;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4913;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4914;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4915;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4916;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4918;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4919;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4920;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4929;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_4932;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_5052;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_5060;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_5061;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.TIP1_A_5062;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2797;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2799;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2800;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2802;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2803;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2812;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2813;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2814;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2815;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2824;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2825;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2826;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2827;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2828;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2829;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2830;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2831;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2832;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2845;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2846;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2853;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2854;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2858;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2862;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2912;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2913;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2914;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2915;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2916;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2917;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2918;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2919;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2920;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2921;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2922;
import static de.ehex.foss.gematik.specifications.gemKPT_Test_ORS1.AFOs.VSDM_A_2923;
import static de.ehex.foss.gematik.specifications.gemRL_Betr_TI.AFOs.*;
import static de.ehex.foss.gematik.specifications.gemRL_TSL_SP_CP.AFOs.*;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2095;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2096;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2097;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2098;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2099;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2100;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2101;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2102;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2103;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2104;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2106;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2107;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2108;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2109;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2111;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2112;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2114;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2115;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2116;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2117;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2118;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2119;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2121;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2122;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2123;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2124;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2125;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2126;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2127;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2128;
import static de.ehex.foss.gematik.specifications.gemSMIME_KOMLE.AFOs.KOM_LE_A_2129;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2003;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2004;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2005;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2006;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2007;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2008;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2009;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2010;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2011;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2012;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2013;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2014;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2015;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2016;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2017;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2018;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2019;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2020;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2021;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2022;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2023;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2024;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2025;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2026;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2027;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2028;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2029;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2030;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2031;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2032;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2033;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2034;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2035;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2036;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2037;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2038;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2039;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2040;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2041;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2042;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2043;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2044;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2045;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2046;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2047;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2048;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2049;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2050;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2052;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2057;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2058;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2059;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2060;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2061;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2062;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2063;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2064;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2065;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2066;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2067;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2069;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2070;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2071;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2072;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2074;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2075;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2076;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2077;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2079;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2080;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2081;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2082;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2083;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2084;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2085;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2086;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2087;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2088;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2089;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2090;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2091;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2092;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2094;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2176;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2177;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2178;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2179;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2180;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2181;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2182;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2184;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2190;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2191;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2192;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2193;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2225;
import static de.ehex.foss.gematik.specifications.gemSpec_CM_KOMLE.AFOs.KOM_LE_A_2230;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4435;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4436;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4437;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4448;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4449;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4450;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4451;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4453;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4455;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4456;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4457;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4458;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4459;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4460;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4461;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4462;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4463;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4464;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4465;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4466;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4467;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4468;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4470;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4471;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4472;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4473;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4474;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4475;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4476;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4477;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4478;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4479;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4480;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4481;
import static de.ehex.foss.gematik.specifications.gemSpec_DSM.AFOs.GS_A_4482;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2130;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2131;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2132;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2133;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2134;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2135;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2136;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2137;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2138;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2139;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2140;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2141;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2142;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2143;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2144;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2145;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2146;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2147;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2148;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2149;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2150;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2151;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2152;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2154;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2155;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2156;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2157;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2158;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2159;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2160;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2161;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2162;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2163;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2164;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2165;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2166;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2167;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2168;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2169;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2171;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2185;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2186;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2187;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2188;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2189;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2223;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2224;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2231;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2277;
import static de.ehex.foss.gematik.specifications.gemSpec_FD_KOMLE.AFOs.KOM_LE_A_2278;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4503;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4504;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4505;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4506;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4507;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4508;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4509;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4510;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4511;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4512;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4513;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4514;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4515;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4516;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4517;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4518;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4519;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4520;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4521;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4522;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4523;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4524;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4525;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4526;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4527;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4528;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4529;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4530;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4531;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4532;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4533;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4534;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4535;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4537;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4538;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4539;
import static de.ehex.foss.gematik.specifications.gemSpec_ISM.AFOs.GS_A_4540;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2348;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2349;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2350;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2351;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2353;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2356;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2357;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2358;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2359;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2547;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2548;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2549;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2550;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2669;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2673;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2704;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2706;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2707;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2712;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2747;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2748;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2761;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2940;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2942;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_2943;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_3006;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_3021;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_3022;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_3023;
import static de.ehex.foss.gematik.specifications.gemSpec_Intermediär_VSDM.AFOs.VSDM_A_3028;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_4514;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_4515;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_4518;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_4689;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_4693;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_4696;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_4812;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_5401;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_5516;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_5517;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_5518;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_5519;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_5520;
import static de.ehex.foss.gematik.specifications.gemSpec_Kon.AFOs.TIP1_A_5521;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4357;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4359;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4361;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4362;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4367;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4368;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4378;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4380;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4384;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4385;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4386;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4387;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4388;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_4393;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_5035;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_5079;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_5131;
import static de.ehex.foss.gematik.specifications.gemSpec_Krypt.AFOs.GS_A_5322;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3824;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3832;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3833;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3834;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3839;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3840;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3841;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3842;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3928;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3930;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3931;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3932;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3933;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3934;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3937;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3939;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_3946;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4009;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4010;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4011;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4012;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4013;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4018;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4024;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4027;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4033;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4035;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4036;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4759;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4762;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4763;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4805;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4808;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4809;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4810;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4817;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4819;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4820;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4831;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_4832;
import static de.ehex.foss.gematik.specifications.gemSpec_Net.AFOs.GS_A_5089;
import static de.ehex.foss.gematik.specifications.gemSpec_OID.AFOs.GS_A_4442;
import static de.ehex.foss.gematik.specifications.gemSpec_OID.AFOs.GS_A_4444;
import static de.ehex.foss.gematik.specifications.gemSpec_OID.AFOs.GS_A_4445;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3695;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3696;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3697;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3702;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3796;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3801;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3804;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3805;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3806;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3807;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3813;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3816;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_3856;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_4541;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_4543;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_4545;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_4547;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_4864;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_5018;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_5025;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_5033;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_5038;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_5039;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_5040;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_5054;
import static de.ehex.foss.gematik.specifications.gemSpec_OM.AFOs.GS_A_5252;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4572;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4573;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4574;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4575;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4576;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4577;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4588;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4590;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4595;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4596;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4598;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4599;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4637;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4640;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4641;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4642;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4643;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4646;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4647;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4648;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4649;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4650;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4651;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4652;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4653;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4654;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4655;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4656;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4657;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4660;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4661;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4662;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4663;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4669;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4670;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4673;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4675;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4677;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4678;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4679;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4680;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4684;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4685;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4686;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4687;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4688;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4689;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4690;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4691;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4692;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4694;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4697;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4704;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4705;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4706;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4714;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4715;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4716;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4717;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4718;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4719;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4720;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4721;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4722;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4723;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4724;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4727;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4730;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4731;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4732;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4735;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4737;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4738;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4739;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4740;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4741;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4748;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4749;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4751;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4829;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4898;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4899;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4902;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4935;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4936;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_4957;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_5077;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_5090;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_5215;
import static de.ehex.foss.gematik.specifications.gemSpec_PKI.AFOs.GS_A_5336;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_3055;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_3058;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_4145;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_4146;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_4147;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_4148;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_4149;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_4155;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_4159;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_4160;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5028;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5029;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5030;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5031;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5032;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5073;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5092;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5093;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5094;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5095;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5134;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5136;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5137;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5138;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5139;
import static de.ehex.foss.gematik.specifications.gemSpec_Perf.AFOs.GS_A_5143;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2280;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2281;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2283;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2286;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2287;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2288;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2290;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2291;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2292;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2293;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2294;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2295;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2297;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2302;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2305;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2314;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2315;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2316;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2317;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2322;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2323;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2324;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2325;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2326;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2327;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2328;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2329;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2331;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2332;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2333;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2334;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2339;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2341;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2342;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2546;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2751;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2961;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_2999;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_FD_VSDM.AFOs.VSDM_A_3009;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2222;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2223;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2225;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2226;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2227;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2228;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2231;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2233;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2234;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2236;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2240;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2241;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2242;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2253;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2254;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2255;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2256;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2257;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2258;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2259;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2260;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2261;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2262;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2263;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2264;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2266;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2267;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2268;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2269;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2270;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2271;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2272;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2273;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2274;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2279;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2299;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2301;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2313;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2330;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2674;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2902;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2950;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2952;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2955;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_2958;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_3001;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_3002;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_3026;
import static de.ehex.foss.gematik.specifications.gemSpec_SST_VSDM.AFOs.VSDM_A_3027;
import static de.ehex.foss.gematik.specifications.gemSpec_SiBetrUmg.AFOs.GS_A_3737;
import static de.ehex.foss.gematik.specifications.gemSpec_SiBetrUmg.AFOs.GS_A_3747;
import static de.ehex.foss.gematik.specifications.gemSpec_SiBetrUmg.AFOs.GS_A_3753;
import static de.ehex.foss.gematik.specifications.gemSpec_SiBetrUmg.AFOs.GS_A_3756;
import static de.ehex.foss.gematik.specifications.gemSpec_SiBetrUmg.AFOs.GS_A_3760;
import static de.ehex.foss.gematik.specifications.gemSpec_SiBetrUmg.AFOs.GS_A_3772;
import static de.ehex.foss.gematik.specifications.gemSpec_SiBetrUmg.AFOs.GS_A_3784;
import static de.ehex.foss.gematik.specifications.gemSpec_SiBetrUmg.AFOs.GS_A_4980;
import static de.ehex.foss.gematik.specifications.gemSpec_SiBetrUmg.AFOs.GS_A_4981;
import static de.ehex.foss.gematik.specifications.gemSpec_SiBetrUmg.AFOs.GS_A_4982;
import static de.ehex.foss.gematik.specifications.gemSpec_SiBetrUmg.AFOs.GS_A_4983;
import static de.ehex.foss.gematik.specifications.gemSpec_SiBetrUmg.AFOs.GS_A_4984;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2012;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2021;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2046;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2047;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2065;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2070;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2071;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2072;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2073;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2074;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2075;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2076;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2087;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2156;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2174;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2177;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2213;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2214;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2309;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2326;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2328;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2329;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2330;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2331;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2332;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2333;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2339;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2343;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2345;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2347;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2350;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2354;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2355;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2356;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2357;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2359;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2360;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2361;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2362;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2363;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2366;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2524;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_2525;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_3078;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_3125;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_3130;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_3139;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_3141;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_3149;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_4944;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_4945;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_4946;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_4947;
import static de.ehex.foss.gematik.specifications.gemSpec_Sich_DS.AFOs.GS_A_5017;
import static de.ehex.foss.gematik.specifications.gemSpec_St_Ampel.AFOs.*;
import static de.ehex.foss.gematik.specifications.gemSpec_TSL.AFOs.TIP1_A_5120;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5566;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5568;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5570;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5588;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5589;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5591;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5592;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5594;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5596;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5598;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5600;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5601;
import static de.ehex.foss.gematik.specifications.gemSpec_VZD.AFOs.TIP1_A_5603;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3547;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3548;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3549;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3550;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3554;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3555;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3557;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3558;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3564;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3565;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3567;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3569;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3574;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3575;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3576;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3577;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3580;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3581;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3590;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3591;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3592;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3594;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3595;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3596;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3630;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3631;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3632;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3633;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3634;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3635;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3637;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3638;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3639;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3640;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3642;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3660;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3877;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3880;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3881;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3883;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3884;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3886;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3887;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_3888;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_4230;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_4231;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_4232;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_4234;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_4235;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_4427;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_4428;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_5087;
import static de.ehex.foss.gematik.specifications.gemSpec_X_509_TSP.AFOs.TIP1_A_5376;
import static de.ehex.foss.gematik.specifications.gemSpec_eGK_Fach_VSDM.AFOs.VSDM_A_2973;
import static de.ehex.foss.gematik.specifications.gemSpec_eGK_Fach_VSDM.AFOs.VSDM_A_2974;
import static de.ehex.foss.gematik.specifications.gemSpec_eGK_Fach_VSDM.AFOs.VSDM_A_2975;
import static de.ehex.foss.gematik.specifications.gemSpec_eGK_Fach_VSDM.AFOs.VSDM_A_2976;
import static de.ehex.foss.gematik.specifications.gemSpec_eGK_Fach_VSDM.AFOs.VSDM_A_2994;
import static java.util.Arrays.asList;
import static java.util.Collections.unmodifiableSet;
import static java.util.Objects.nonNull;
import static java.util.stream.Collectors.toSet;
import static java.util.stream.Stream.concat;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
 * Enumeration of all currently considered gematik {@link PTStB}s.
 *
 * @author Stefan Gasterstädt
 * @since September 21st, 2016
 */
public enum PTStBs implements PTStB {
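    // Each enum constant names one product type profile and carries two lists of
    // AFOs (gematik requirements). Editorial note: the exact semantics of the two
    // lists are defined by the enum constructor, which lies outside this excerpt;
    // the profile abbreviations follow gematik naming (CM = Clientmodul,
    // FD = Fachdienst, PTV = product type version).

    // Product type profile of the KOM-LE client module (Clientmodul KOM-LE),
    // product type version 1.2.0.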
gemProdT_CM_KOMLE_PTV1_2_0("gemProdT_CM_KOMLE_PTV1.2.0",
asList(KOM_LE_A_2004, KOM_LE_A_2012, KOM_LE_A_2013, KOM_LE_A_2016, KOM_LE_A_2017, KOM_LE_A_2176, KOM_LE_A_2021, KOM_LE_A_2022, KOM_LE_A_2178, KOM_LE_A_2192, KOM_LE_A_2024, KOM_LE_A_2025, KOM_LE_A_2028, KOM_LE_A_2034,
KOM_LE_A_2038, KOM_LE_A_2039, KOM_LE_A_2042, KOM_LE_A_2179, KOM_LE_A_2046, KOM_LE_A_2047, KOM_LE_A_2048, KOM_LE_A_2049, KOM_LE_A_2180, KOM_LE_A_2050, KOM_LE_A_2057, KOM_LE_A_2058, KOM_LE_A_2062, KOM_LE_A_2063,
KOM_LE_A_2066, KOM_LE_A_2067, KOM_LE_A_2069, KOM_LE_A_2181, KOM_LE_A_2070, KOM_LE_A_2071, KOM_LE_A_2072, KOM_LE_A_2074, KOM_LE_A_2079, KOM_LE_A_2080, KOM_LE_A_2081, KOM_LE_A_2082, KOM_LE_A_2083, KOM_LE_A_2084,
KOM_LE_A_2085, KOM_LE_A_2086, KOM_LE_A_2087, KOM_LE_A_2088, KOM_LE_A_2089, KOM_LE_A_2090, KOM_LE_A_2091, KOM_LE_A_2184, KOM_LE_A_2092, KOM_LE_A_2225, KOM_LE_A_2230, KOM_LE_A_2094, GS_A_4359, GS_A_4385, GS_A_4386,
GS_A_5035, GS_A_5038, GS_A_5136, TIP1_A_4126, TIP1_A_4157),
            asList(GS_A_2350, GS_A_2354, GS_A_2524, GS_A_2525, GS_A_3695, GS_A_3696, GS_A_3697, GS_A_3876, GS_A_3877, GS_A_3878, GS_A_3879, GS_A_3880, GS_A_3881, GS_A_3882, GS_A_3883, GS_A_3884, GS_A_3885, GS_A_3886, GS_A_3887, GS_A_3888,
                GS_A_3889, GS_A_3890, GS_A_3891, GS_A_3892, GS_A_3893, GS_A_3894, GS_A_3895, GS_A_3896, GS_A_3902, GS_A_3903, GS_A_3904, GS_A_3905, GS_A_3906, GS_A_3907, GS_A_3908, GS_A_3909, GS_A_3910, GS_A_3911, GS_A_3912, GS_A_3913,
GS_A_3914, GS_A_3915, GS_A_3916, GS_A_3917, GS_A_3918, GS_A_3919, GS_A_3920, GS_A_3921, GS_A_3922, GS_A_3923, GS_A_3924, GS_A_3925, GS_A_3958, GS_A_3959, GS_A_3960, GS_A_3961, GS_A_3962, GS_A_3963, GS_A_3964, GS_A_3965,
GS_A_3966, GS_A_3967, GS_A_3968, GS_A_3969, GS_A_3970, GS_A_3971, GS_A_3972, GS_A_3975, GS_A_3976, GS_A_3977, GS_A_3978, GS_A_3981, GS_A_3982, GS_A_3983, GS_A_3984, GS_A_3985, GS_A_3986, GS_A_3987, GS_A_3988, GS_A_3989,
GS_A_3990, GS_A_3991, GS_A_3992, GS_A_3993, GS_A_3994, GS_A_3995, GS_A_3996, GS_A_3997, GS_A_3998, GS_A_3999, GS_A_4000, GS_A_4085, GS_A_4086, GS_A_4087, GS_A_4088, GS_A_4089, GS_A_4090, GS_A_4091, GS_A_4092, GS_A_4093,
GS_A_4094, GS_A_4095, GS_A_4096, GS_A_4097, GS_A_4099, GS_A_4100, GS_A_4101, GS_A_4102, GS_A_4103, GS_A_4112, GS_A_4113, GS_A_4114, GS_A_4115, GS_A_4116, GS_A_4117, GS_A_4118, GS_A_4119, GS_A_4120, GS_A_4121, GS_A_4122,
GS_A_4123, GS_A_4124, GS_A_4125, GS_A_4126, GS_A_4127, GS_A_4128, GS_A_4129, GS_A_4130, GS_A_4131, GS_A_4132, GS_A_4133, GS_A_4134, GS_A_4135, GS_A_4136, GS_A_4137, GS_A_4138, GS_A_4139, GS_A_4367, GS_A_4397, GS_A_4398,
GS_A_4399, GS_A_4400, GS_A_4401, GS_A_4402, GS_A_4404, GS_A_4405, GS_A_4406, GS_A_4407, GS_A_4408, GS_A_4409, GS_A_4410, GS_A_4411, GS_A_4412, GS_A_4413, GS_A_4414, GS_A_4415, GS_A_4416, GS_A_4417, GS_A_4418, GS_A_4419,
GS_A_4420, GS_A_4421, GS_A_4422, GS_A_4424, GS_A_4425, GS_A_4541, GS_A_4831, GS_A_4855, GS_A_4892, GS_A_4893, GS_A_5039, GS_A_5086, GS_A_5200, GS_A_5248, GS_A_5249, GS_A_5251, KOM_LE_A_2003, KOM_LE_A_2005, KOM_LE_A_2006,
KOM_LE_A_2007, KOM_LE_A_2008, KOM_LE_A_2009, KOM_LE_A_2010, KOM_LE_A_2011, KOM_LE_A_2014, KOM_LE_A_2015, KOM_LE_A_2018, KOM_LE_A_2019, KOM_LE_A_2020, KOM_LE_A_2023, KOM_LE_A_2026, KOM_LE_A_2027, KOM_LE_A_2029,
KOM_LE_A_2030, KOM_LE_A_2031, KOM_LE_A_2032, KOM_LE_A_2033, KOM_LE_A_2035, KOM_LE_A_2036, KOM_LE_A_2037, KOM_LE_A_2040, KOM_LE_A_2041, KOM_LE_A_2043, KOM_LE_A_2044, KOM_LE_A_2045, KOM_LE_A_2052, KOM_LE_A_2059,
KOM_LE_A_2060, KOM_LE_A_2061, KOM_LE_A_2064, KOM_LE_A_2065, KOM_LE_A_2075, KOM_LE_A_2076, KOM_LE_A_2077, KOM_LE_A_2095, KOM_LE_A_2096, KOM_LE_A_2097, KOM_LE_A_2098, KOM_LE_A_2099, KOM_LE_A_2100, KOM_LE_A_2101,
KOM_LE_A_2102, KOM_LE_A_2103, KOM_LE_A_2104, KOM_LE_A_2106, KOM_LE_A_2107, KOM_LE_A_2108, KOM_LE_A_2109, KOM_LE_A_2111, KOM_LE_A_2112, KOM_LE_A_2114, KOM_LE_A_2115, KOM_LE_A_2116, KOM_LE_A_2117, KOM_LE_A_2118,
KOM_LE_A_2119, KOM_LE_A_2121, KOM_LE_A_2122, KOM_LE_A_2123, KOM_LE_A_2124, KOM_LE_A_2125, KOM_LE_A_2126, KOM_LE_A_2127, KOM_LE_A_2128, KOM_LE_A_2129, KOM_LE_A_2177, KOM_LE_A_2182, KOM_LE_A_2190, KOM_LE_A_2191,
KOM_LE_A_2193, KOM_LE_A_2234, KOM_LE_A_2238, KOM_LE_A_2239, KOM_LE_A_2241, KOM_LE_A_2242, KOM_LE_A_2244, KOM_LE_A_2245, KOM_LE_A_2247, KOM_LE_A_2260, KOM_LE_A_2261, KOM_LE_A_2265, KOM_LE_A_2268, TIP1_A_2730, TIP1_A_2769,
TIP1_A_2781, TIP1_A_2820, TIP1_A_3202, TIP1_A_4121, TIP1_A_4122, TIP1_A_4127, TIP1_A_4132, TIP1_A_4133, TIP1_A_4149, TIP1_A_4158, TIP1_A_4929, TIP1_A_5052)),
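    // Product type profile of the KOM-LE specialized service (Fachdienst KOM-LE),
    // product type version 1.2.0.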
gemProdT_FD_KOMLE_PTV1_2_0("gemProdT_FD_KOMLE_PTV1.2.0",
asList(TIP1_A_4126, TIP1_A_4157, KOM_LE_A_2130, KOM_LE_A_2131, KOM_LE_A_2132, KOM_LE_A_2223, KOM_LE_A_2224, KOM_LE_A_2278, KOM_LE_A_2277, KOM_LE_A_2134, KOM_LE_A_2135, KOM_LE_A_2136, KOM_LE_A_2139, KOM_LE_A_2140, KOM_LE_A_2141,
KOM_LE_A_2186, KOM_LE_A_2142, KOM_LE_A_2145, KOM_LE_A_2146, KOM_LE_A_2147, KOM_LE_A_2149, KOM_LE_A_2150, KOM_LE_A_2151, KOM_LE_A_2154, KOM_LE_A_2155, KOM_LE_A_2156, KOM_LE_A_2157, KOM_LE_A_2187, KOM_LE_A_2188,
KOM_LE_A_2158, KOM_LE_A_2159, KOM_LE_A_2160, KOM_LE_A_2161, KOM_LE_A_2163, KOM_LE_A_2165, KOM_LE_A_2166, KOM_LE_A_2167, KOM_LE_A_2168, KOM_LE_A_2169, KOM_LE_A_2231, GS_A_4359, GS_A_4386, GS_A_4009, GS_A_4831, GS_A_4832,
GS_A_4013, GS_A_4024, GS_A_4033, GS_A_4035, GS_A_4762, GS_A_4036, GS_A_4763, GS_A_3932, GS_A_3842, GS_A_3931, GS_A_3832, GS_A_3833, GS_A_3840, GS_A_4817, GS_A_3934, GS_A_4819, GS_A_3937, GS_A_3695, GS_A_3696, GS_A_5025,
GS_A_3702, GS_A_4543, GS_A_4545, GS_A_3856, GS_A_4547, GS_A_5252, GS_A_3801, GS_A_3796, GS_A_3816, GS_A_3804, GS_A_3807, GS_A_3806, GS_A_5038, GS_A_4864, GS_A_4146, GS_A_4147, GS_A_4148, GS_A_4149, GS_A_5137, GS_A_5138,
GS_A_4637, GS_A_4829, GS_A_4642, GS_A_4643, GS_A_4646, GS_A_4647, GS_A_4648, GS_A_4649, GS_A_4650, GS_A_4651, GS_A_4898, GS_A_4899, GS_A_4652, GS_A_4653, GS_A_4654, GS_A_4655, GS_A_4656, GS_A_4657, GS_A_4660, GS_A_4749,
GS_A_4661, GS_A_4662, GS_A_4663, GS_A_4751, GS_A_4957, GS_A_5215, TIP1_A_5120, TIP1_A_5566, TIP1_A_5591, TIP1_A_5594, TIP1_A_5596, TIP1_A_5600, TIP1_A_5603, TIP1_A_5598),
asList(GS_A_2012, GS_A_2021, GS_A_2046, GS_A_2047, GS_A_2065, GS_A_2070, GS_A_2071, GS_A_2072, GS_A_2073, GS_A_2074, GS_A_2075, GS_A_2076, GS_A_2087, GS_A_2156, GS_A_2174, GS_A_2177, GS_A_2213, GS_A_2214, GS_A_2309, GS_A_2326,
GS_A_2328, GS_A_2329, GS_A_2330, GS_A_2331, GS_A_2332, GS_A_2333, GS_A_2339, GS_A_2343, GS_A_2345, GS_A_2347, GS_A_2355, GS_A_2356, GS_A_2357, GS_A_2359, GS_A_2360, GS_A_2361, GS_A_2362, GS_A_2363, GS_A_2366, GS_A_3078,
GS_A_3125, GS_A_3130, GS_A_3139, GS_A_3141, GS_A_3149, GS_A_3697, GS_A_3737, GS_A_3747, GS_A_3753, GS_A_3756, GS_A_3760, GS_A_3772, GS_A_3784, GS_A_3805, GS_A_3813, GS_A_3824, GS_A_3839, GS_A_3841, GS_A_3876, GS_A_3877,
GS_A_3878, GS_A_3879, GS_A_3880, GS_A_3881, GS_A_3882, GS_A_3883, GS_A_3884, GS_A_3885, GS_A_3886, GS_A_3887, GS_A_3888, GS_A_3889, GS_A_3890, GS_A_3891, GS_A_3892, GS_A_3893, GS_A_3894, GS_A_3895, GS_A_3896, GS_A_3902,
GS_A_3903, GS_A_3904, GS_A_3905, GS_A_3906, GS_A_3907, GS_A_3908, GS_A_3909, GS_A_3910, GS_A_3911, GS_A_3912, GS_A_3913, GS_A_3914, GS_A_3915, GS_A_3916, GS_A_3917, GS_A_3918, GS_A_3919, GS_A_3920, GS_A_3921, GS_A_3922,
GS_A_3923, GS_A_3924, GS_A_3925, GS_A_3958, GS_A_3959, GS_A_3960, GS_A_3961, GS_A_3962, GS_A_3963, GS_A_3964, GS_A_3965, GS_A_3966, GS_A_3967, GS_A_3968, GS_A_3969, GS_A_3970, GS_A_3971, GS_A_3972, GS_A_3975, GS_A_3976,
GS_A_3977, GS_A_3978, GS_A_3981, GS_A_3982, GS_A_3983, GS_A_3984, GS_A_3985, GS_A_3986, GS_A_3987, GS_A_3988, GS_A_3989, GS_A_3990, GS_A_3991, GS_A_3992, GS_A_3993, GS_A_3994, GS_A_3995, GS_A_3996, GS_A_3997, GS_A_3998,
GS_A_3999, GS_A_4000, GS_A_4018, GS_A_4027, GS_A_4085, GS_A_4086, GS_A_4087, GS_A_4088, GS_A_4089, GS_A_4090, GS_A_4091, GS_A_4092, GS_A_4093, GS_A_4094, GS_A_4095, GS_A_4096, GS_A_4097, GS_A_4099, GS_A_4100, GS_A_4101,
GS_A_4102, GS_A_4103, GS_A_4106, GS_A_4108, GS_A_4109, GS_A_4112, GS_A_4113, GS_A_4114, GS_A_4115, GS_A_4116, GS_A_4117, GS_A_4118, GS_A_4119, GS_A_4120, GS_A_4121, GS_A_4122, GS_A_4123, GS_A_4124, GS_A_4125, GS_A_4126,
GS_A_4127, GS_A_4128, GS_A_4129, GS_A_4130, GS_A_4131, GS_A_4132, GS_A_4133, GS_A_4134, GS_A_4135, GS_A_4136, GS_A_4137, GS_A_4138, GS_A_4139, GS_A_4330, GS_A_4367, GS_A_4368, GS_A_4384, GS_A_4385, GS_A_4387, GS_A_4388,
GS_A_4397, GS_A_4398, GS_A_4399, GS_A_4400, GS_A_4401, GS_A_4402, GS_A_4404, GS_A_4405, GS_A_4406, GS_A_4407, GS_A_4408, GS_A_4409, GS_A_4410, GS_A_4411, GS_A_4412, GS_A_4413, GS_A_4414, GS_A_4415, GS_A_4416, GS_A_4417,
GS_A_4418, GS_A_4419, GS_A_4420, GS_A_4421, GS_A_4422, GS_A_4423, GS_A_4424, GS_A_4425, GS_A_4435, GS_A_4436, GS_A_4437, GS_A_4448, GS_A_4449, GS_A_4450, GS_A_4451, GS_A_4453, GS_A_4455, GS_A_4456, GS_A_4457, GS_A_4458,
GS_A_4459, GS_A_4460, GS_A_4461, GS_A_4462, GS_A_4463, GS_A_4464, GS_A_4465, GS_A_4466, GS_A_4467, GS_A_4468, GS_A_4470, GS_A_4471, GS_A_4472, GS_A_4473, GS_A_4474, GS_A_4475, GS_A_4476, GS_A_4477, GS_A_4478, GS_A_4479,
GS_A_4480, GS_A_4481, GS_A_4482, GS_A_4503, GS_A_4504, GS_A_4505, GS_A_4506, GS_A_4507, GS_A_4508, GS_A_4509, GS_A_4510, GS_A_4511, GS_A_4512, GS_A_4513, GS_A_4514, GS_A_4515, GS_A_4516, GS_A_4517, GS_A_4518, GS_A_4519,
GS_A_4520, GS_A_4521, GS_A_4522, GS_A_4523, GS_A_4524, GS_A_4525, GS_A_4526, GS_A_4527, GS_A_4528, GS_A_4529, GS_A_4530, GS_A_4531, GS_A_4532, GS_A_4533, GS_A_4534, GS_A_4535, GS_A_4537, GS_A_4538, GS_A_4539, GS_A_4540,
GS_A_4541, GS_A_4640, GS_A_4641, GS_A_4720, GS_A_4748, GS_A_4759, GS_A_4805, GS_A_4808, GS_A_4855, GS_A_4892, GS_A_4893, GS_A_4944, GS_A_4945, GS_A_4946, GS_A_4947, GS_A_4980, GS_A_4981, GS_A_4982, GS_A_4983, GS_A_4984,
GS_A_5017, GS_A_5033, GS_A_5035, GS_A_5039, GS_A_5040, GS_A_5054, GS_A_5134, GS_A_5139, GS_A_5143, GS_A_5200, GS_A_5248, GS_A_5249, GS_A_5251, KOM_LE_A_2133, KOM_LE_A_2137, KOM_LE_A_2138, KOM_LE_A_2143, KOM_LE_A_2144,
KOM_LE_A_2148, KOM_LE_A_2152, KOM_LE_A_2162, KOM_LE_A_2164, KOM_LE_A_2171, KOM_LE_A_2185, KOM_LE_A_2189, KOM_LE_A_2234, KOM_LE_A_2238, KOM_LE_A_2250, KOM_LE_A_2251, KOM_LE_A_2252, KOM_LE_A_2254, KOM_LE_A_2255,
KOM_LE_A_2257, KOM_LE_A_2260, KOM_LE_A_2261, KOM_LE_A_2265, KOM_LE_A_2268, TIP1_A_2730, TIP1_A_2769, TIP1_A_2781, TIP1_A_2820, TIP1_A_3202, TIP1_A_4132, TIP1_A_4133, TIP1_A_4149, TIP1_A_4158, TIP1_A_4929, TIP1_A_4932,
TIP1_A_5052, TIP1_A_5568, TIP1_A_5588, TIP1_A_5589, TIP1_A_5592, TIP1_A_5601)),
gemProdT_FD_VSDM_PTV1_4_0("gemProdT_FD_VSDM_PTV1.4.0",
asList(TIP1_A_4126, TIP1_A_4157, TIP1_A_5120, GS_A_4386, GS_A_4009, GS_A_4832, GS_A_4013, GS_A_4024, GS_A_4033, GS_A_4035, GS_A_4762, GS_A_4036, GS_A_4763, GS_A_4809, GS_A_3932, GS_A_3834, GS_A_3842, GS_A_3930, GS_A_3931,
GS_A_3839, GS_A_3832, GS_A_3833, GS_A_3840, GS_A_4817, GS_A_3933, GS_A_3934, GS_A_4819, GS_A_3937, GS_A_3695, GS_A_3696, GS_A_3702, GS_A_4543, GS_A_4545, GS_A_3856, GS_A_4547, GS_A_3801, GS_A_3796, GS_A_3816, GS_A_3804,
GS_A_3807, GS_A_3806, GS_A_3805, GS_A_5025, GS_A_5031, GS_A_5092, GS_A_5093, GS_A_5094, GS_A_5095, GS_A_4637, GS_A_4829, GS_A_4642, GS_A_4643, GS_A_4646, GS_A_4647, GS_A_4648, GS_A_4649, GS_A_4650, GS_A_4651, GS_A_4898,
GS_A_4899, GS_A_4652, GS_A_4653, GS_A_4654, GS_A_4655, GS_A_4656, GS_A_4657, GS_A_4660, GS_A_4749, GS_A_4661, GS_A_4662, GS_A_4663, GS_A_4751, GS_A_4957, GS_A_5215, VSDM_A_2226, VSDM_A_2241, VSDM_A_2242, VSDM_A_2253,
VSDM_A_2254, VSDM_A_2255, VSDM_A_2256, VSDM_A_2257, VSDM_A_2258, VSDM_A_2259, VSDM_A_2260, VSDM_A_2261, VSDM_A_2262, VSDM_A_2263, VSDM_A_2266, VSDM_A_2267, VSDM_A_2269, VSDM_A_2270, VSDM_A_2272, VSDM_A_2273, VSDM_A_2274,
VSDM_A_2279, VSDM_A_2313, VSDM_A_2902, VSDM_A_2952, VSDM_A_2955, VSDM_A_3001, VSDM_A_2297, VSDM_A_2299, VSDM_A_2301, VSDM_A_2302, VSDM_A_2305, VSDM_A_2314, VSDM_A_2315, VSDM_A_2316, VSDM_A_2317, VSDM_A_2322, VSDM_A_2323,
VSDM_A_2324, VSDM_A_2325, VSDM_A_2326, VSDM_A_2327, VSDM_A_2328, VSDM_A_2330, VSDM_A_2331, VSDM_A_2332, VSDM_A_2333, VSDM_A_2334, VSDM_A_2339, VSDM_A_2264, VSDM_A_2294, VSDM_A_2341, VSDM_A_2546, VSDM_A_2961, VSDM_A_2973,
VSDM_A_2974, VSDM_A_2975, VSDM_A_2976, VSDM_A_2994, VSDM_A_3009, VSDM_A_2751, VSDM_A_2329, VSDM_A_2280, VSDM_A_2281, VSDM_A_2283, VSDM_A_2286, VSDM_A_2287, VSDM_A_2288, VSDM_A_2290, VSDM_A_2291, VSDM_A_2292, VSDM_A_2293,
VSDM_A_2342, VSDM_A_2295),
asList(TIP1_A_4133, TIP1_A_4149, TIP1_A_2769, TIP1_A_2781, TIP1_A_2820, TIP1_A_3202, VSDM_A_2800, VSDM_A_2802, VSDM_A_2803, VSDM_A_2813, VSDM_A_2814, VSDM_A_2815, VSDM_A_2824, VSDM_A_2825, VSDM_A_2830, VSDM_A_2831, VSDM_A_2832,
VSDM_A_2846, VSDM_A_2853, VSDM_A_2854, VSDM_A_2858, VSDM_A_2862, VSDM_A_2958, GS_A_3824, GS_A_4018, GS_A_4027, GS_A_4759, GS_A_3839, GS_A_5089, GS_A_3695, GS_A_3696, GS_A_3697, GS_A_4541, GS_A_4831, GS_A_5054, GS_A_5038,
GS_A_5039, GS_A_5040, GS_A_5018, GS_A_5032, GS_A_5095, GS_A_4640, GS_A_5033, GS_A_3805, GS_A_3813, GS_A_3928, TIP1_A_4158, VSDM_A_2268, VSDM_A_2999, VSDM_A_2827, VSDM_A_2828, VSDM_A_2812, VSDM_A_2826, VSDM_A_2829,
TIP1_A_4932, GS_A_4330, GS_A_4473, GS_A_4474, GS_A_4475, GS_A_4479, GS_A_4523, GS_A_4524, GS_A_4528, GS_A_4529, GS_A_4384, GS_A_4385, GS_A_4386, GS_A_4387, GS_A_5035, GS_A_3839, GS_A_3841, GS_A_4808, GS_A_4641,
GS_A_4748, GS_A_4980, GS_A_4981, GS_A_4982, GS_A_4983, GS_A_4984, GS_A_3784, GS_A_3737, GS_A_3747, GS_A_3753, GS_A_3772, GS_A_3756, GS_A_3760, GS_A_2087, GS_A_2213, GS_A_2076, GS_A_2174, GS_A_2177, GS_A_2012, GS_A_2021,
GS_A_2046, GS_A_4944, GS_A_4945, GS_A_4946, GS_A_4947, GS_A_2047, GS_A_2309, GS_A_2326, GS_A_2328, GS_A_2329, GS_A_2330, GS_A_2331, GS_A_2332, GS_A_2345, GS_A_2347, GS_A_2356, GS_A_2361, GS_A_2363, GS_A_2366, GS_A_4378,
GS_A_4380, GS_A_4368, GS_A_4085, GS_A_4086, GS_A_4087, GS_A_4088, GS_A_4089, GS_A_4892, GS_A_4090, GS_A_4091, GS_A_4092, GS_A_4093, GS_A_4095, GS_A_4096, GS_A_4097, GS_A_4099, GS_A_4094, GS_A_4100, GS_A_4101, GS_A_4102,
GS_A_4103, GS_A_4397, GS_A_4106, GS_A_4108, GS_A_4109, GS_A_4414, GS_A_4398, GS_A_4399, GS_A_4400, GS_A_4401, GS_A_4402, GS_A_4415, GS_A_4418, GS_A_4416, GS_A_4417, GS_A_4419, GS_A_4420, GS_A_4421, GS_A_4422, GS_A_4423,
GS_A_4424, GS_A_4425, GS_A_4404, GS_A_4405, GS_A_4406, GS_A_4407, GS_A_4408, GS_A_4409, GS_A_4410, GS_A_4411, GS_A_4412, GS_A_4112, GS_A_4113, GS_A_4114, GS_A_4115, GS_A_4116, GS_A_4117, GS_A_4413, GS_A_4118, GS_A_4119,
GS_A_3876, GS_A_3877, GS_A_3878, GS_A_3879, GS_A_3880, GS_A_3881, GS_A_3883, GS_A_3884, GS_A_3885, GS_A_3886, GS_A_3887, GS_A_3888, GS_A_3889, GS_A_3890, GS_A_3891, GS_A_3892, GS_A_3893, GS_A_3894, GS_A_3895, GS_A_3896,
GS_A_3902, GS_A_3903, GS_A_3904, GS_A_3905, GS_A_3906, GS_A_3907, GS_A_4120, GS_A_3908, GS_A_3909, GS_A_3910, GS_A_3911, GS_A_3912, GS_A_3913, GS_A_3914, GS_A_3915, GS_A_3916, GS_A_3917, GS_A_3918, GS_A_3882, GS_A_3919,
GS_A_3920, GS_A_3921, GS_A_3922, GS_A_3923, GS_A_3924, GS_A_3925, GS_A_3958, GS_A_3959, GS_A_3960, GS_A_3961, GS_A_3962, GS_A_3963, GS_A_3964, GS_A_3965, GS_A_3966, GS_A_3967, GS_A_3968, GS_A_3969, GS_A_3970, GS_A_3971,
GS_A_3972, GS_A_3975, GS_A_3976, GS_A_3977, GS_A_3978, GS_A_3981, GS_A_3982, GS_A_3983, GS_A_3984, GS_A_3985, GS_A_3986, GS_A_3987, GS_A_3988, GS_A_3989, GS_A_3990, GS_A_3991, GS_A_3992, GS_A_3993, GS_A_3994, GS_A_4000,
GS_A_3995, GS_A_3996, GS_A_3997, GS_A_3998, GS_A_3999, GS_A_4121, GS_A_4122, GS_A_4123, GS_A_4124, GS_A_4125, GS_A_4126, GS_A_4127, GS_A_4128, GS_A_4129, GS_A_4130, GS_A_4131, GS_A_4893, GS_A_4132, GS_A_4133, GS_A_4134,
GS_A_4135, GS_A_4136, GS_A_4137, GS_A_4138, GS_A_4139, GS_A_4805, GS_A_5200, GS_A_5248, GS_A_5249, GS_A_5250, GS_A_5251, VSDM_A_2802, VSDM_A_2912, VSDM_A_2914, VSDM_A_2913, VSDM_A_2915, VSDM_A_2916, VSDM_A_2917,
VSDM_A_2918, VSDM_A_2919, VSDM_A_2920, VSDM_A_2921, VSDM_A_2922, VSDM_A_2923, TIP1_A_2730, GS_A_4720)),
gemProdT_FD_VSDM_PTV1_5_0("gemProdT_FD_VSDM_PTV1.5.0",
asList(TIP1_A_4126, TIP1_A_4157, TIP1_A_5120, GS_A_4386, GS_A_4009, GS_A_4832, GS_A_4013, GS_A_4024, GS_A_4033, GS_A_4035, GS_A_4762, GS_A_4036, GS_A_4763, GS_A_4809, GS_A_3932, GS_A_3834, GS_A_3842, GS_A_3930, GS_A_3931,
GS_A_3839, GS_A_3832, GS_A_3833, GS_A_3840, GS_A_4817, GS_A_3933, GS_A_3934, GS_A_4819, GS_A_3937, GS_A_3695, GS_A_3696, GS_A_3702, GS_A_4543, GS_A_4545, GS_A_3856, GS_A_4547, GS_A_3801, GS_A_3796, GS_A_3816, GS_A_3804,
GS_A_3807, GS_A_3806, GS_A_3805, GS_A_5025, GS_A_5031, GS_A_5092, GS_A_5093, GS_A_5094, GS_A_5095, GS_A_4637, GS_A_4829, GS_A_4642, GS_A_4643, GS_A_4646, GS_A_4647, GS_A_4648, GS_A_4649, GS_A_4650, GS_A_4651, GS_A_4898,
GS_A_4899, GS_A_4652, GS_A_4653, GS_A_4654, GS_A_4655, GS_A_4656, GS_A_4657, GS_A_4660, GS_A_4749, GS_A_4661, GS_A_4662, GS_A_4663, GS_A_4751, GS_A_4957, GS_A_5077, GS_A_5215, GS_A_5336, VSDM_A_2226, VSDM_A_2241,
VSDM_A_2242, VSDM_A_2253, VSDM_A_2254, VSDM_A_2255, VSDM_A_2256, VSDM_A_2257, VSDM_A_2258, VSDM_A_2259, VSDM_A_2260, VSDM_A_2261, VSDM_A_2262, VSDM_A_2263, VSDM_A_2266, VSDM_A_2267, VSDM_A_2269, VSDM_A_2270, VSDM_A_2272,
VSDM_A_2273, VSDM_A_2274, VSDM_A_2279, VSDM_A_2313, VSDM_A_2902, VSDM_A_2952, VSDM_A_2955, VSDM_A_3001, VSDM_A_2297, VSDM_A_2299, VSDM_A_2301, VSDM_A_2302, VSDM_A_2305, VSDM_A_2314, VSDM_A_2315, VSDM_A_2316, VSDM_A_2317,
VSDM_A_2322, VSDM_A_2323, VSDM_A_2324, VSDM_A_2325, VSDM_A_2326, VSDM_A_2327, VSDM_A_2328, VSDM_A_2330, VSDM_A_2331, VSDM_A_2332, VSDM_A_2333, VSDM_A_2334, VSDM_A_2339, VSDM_A_2264, VSDM_A_2294, VSDM_A_2341, VSDM_A_2546,
VSDM_A_2961, VSDM_A_2973, VSDM_A_2974, VSDM_A_2975, VSDM_A_2976, VSDM_A_2994, VSDM_A_3009, VSDM_A_2751, VSDM_A_2329, VSDM_A_2280, VSDM_A_2281, VSDM_A_2283, VSDM_A_2286, VSDM_A_2287, VSDM_A_2288, VSDM_A_2290, VSDM_A_2291,
VSDM_A_2292, VSDM_A_2293, VSDM_A_2342, VSDM_A_2295),
asList(TIP1_A_4133, TIP1_A_4149, TIP1_A_2769, TIP1_A_2781, TIP1_A_2820, TIP1_A_3202, VSDM_A_2800, VSDM_A_2802, VSDM_A_2803, VSDM_A_2813, VSDM_A_2814, VSDM_A_2815, VSDM_A_2824, VSDM_A_2825, VSDM_A_2830, VSDM_A_2831, VSDM_A_2832,
VSDM_A_2846, VSDM_A_2853, VSDM_A_2854, VSDM_A_2858, VSDM_A_2862, VSDM_A_2958, GS_A_3824, GS_A_4018, GS_A_4027, GS_A_4759, GS_A_3839, GS_A_5089, GS_A_3695, GS_A_3696, GS_A_3697, GS_A_4541, GS_A_4831, GS_A_5054, GS_A_5038,
GS_A_5039, GS_A_5040, GS_A_5018, GS_A_5032, GS_A_5095, GS_A_4640, GS_A_5033, GS_A_3805, GS_A_3813, GS_A_3928, TIP1_A_4158, VSDM_A_2268, VSDM_A_2999, VSDM_A_2827, VSDM_A_2828, VSDM_A_2812, VSDM_A_2826, VSDM_A_2829,
TIP1_A_4932, GS_A_4330, GS_A_4473, GS_A_4474, GS_A_4475, GS_A_4479, GS_A_4523, GS_A_4524, GS_A_4528, GS_A_4529, GS_A_4384, GS_A_4385, GS_A_4386, GS_A_4387, GS_A_5035, GS_A_3839, GS_A_3841, GS_A_4808, GS_A_4641,
GS_A_4748, GS_A_4980, GS_A_4981, GS_A_4982, GS_A_4983, GS_A_4984, GS_A_3784, GS_A_3737, GS_A_3747, GS_A_3753, GS_A_3772, GS_A_3756, GS_A_3760, GS_A_2087, GS_A_2213, GS_A_2076, GS_A_2174, GS_A_2177, GS_A_2012, GS_A_2021,
GS_A_2046, GS_A_4944, GS_A_4945, GS_A_4946, GS_A_4947, GS_A_2047, GS_A_2309, GS_A_2326, GS_A_2328, GS_A_2329, GS_A_2330, GS_A_2331, GS_A_2332, GS_A_2345, GS_A_2347, GS_A_2356, GS_A_2361, GS_A_2363, GS_A_2366, GS_A_4378,
GS_A_4380, GS_A_4368, GS_A_4085, GS_A_4086, GS_A_4087, GS_A_4088, GS_A_4089, GS_A_4892, GS_A_4090, GS_A_4091, GS_A_4092, GS_A_4093, GS_A_4095, GS_A_4096, GS_A_4097, GS_A_4099, GS_A_4094, GS_A_4100, GS_A_4101, GS_A_4102,
GS_A_4103, GS_A_4397, GS_A_4106, GS_A_4108, GS_A_4109, GS_A_4414, GS_A_4398, GS_A_4399, GS_A_4400, GS_A_4401, GS_A_4402, GS_A_4415, GS_A_4418, GS_A_4416, GS_A_4417, GS_A_4419, GS_A_4420, GS_A_4421, GS_A_4422, GS_A_4423,
GS_A_4424, GS_A_4425, GS_A_4404, GS_A_4405, GS_A_4406, GS_A_4407, GS_A_4408, GS_A_4409, GS_A_4410, GS_A_4411, GS_A_4412, GS_A_4112, GS_A_4113, GS_A_4114, GS_A_4115, GS_A_4116, GS_A_4117, GS_A_4413, GS_A_4118, GS_A_4119,
GS_A_3876, GS_A_3877, GS_A_3878, GS_A_3879, GS_A_3880, GS_A_3881, GS_A_3883, GS_A_3884, GS_A_3885, GS_A_3886, GS_A_3887, GS_A_3888, GS_A_3889, GS_A_3890, GS_A_3891, GS_A_3892, GS_A_3893, GS_A_3894, GS_A_3895, GS_A_3896,
GS_A_3902, GS_A_3903, GS_A_3904, GS_A_3905, GS_A_3906, GS_A_3907, GS_A_4120, GS_A_3908, GS_A_3909, GS_A_3910, GS_A_3911, GS_A_3912, GS_A_3913, GS_A_3914, GS_A_3915, GS_A_3916, GS_A_3917, GS_A_3918, GS_A_3882, GS_A_3919,
GS_A_3920, GS_A_3921, GS_A_3922, GS_A_3923, GS_A_3924, GS_A_3925, GS_A_3958, GS_A_3959, GS_A_3960, GS_A_3961, GS_A_3962, GS_A_3963, GS_A_3964, GS_A_3965, GS_A_3966, GS_A_3967, GS_A_3968, GS_A_3969, GS_A_3970, GS_A_3971,
GS_A_3972, GS_A_3975, GS_A_3976, GS_A_3977, GS_A_3978, GS_A_3981, GS_A_3982, GS_A_3983, GS_A_3984, GS_A_3985, GS_A_3986, GS_A_3987, GS_A_3988, GS_A_3989, GS_A_3990, GS_A_3991, GS_A_3992, GS_A_3993, GS_A_3994, GS_A_4000,
GS_A_3995, GS_A_3996, GS_A_3997, GS_A_3998, GS_A_3999, GS_A_4121, GS_A_4122, GS_A_4123, GS_A_4124, GS_A_4125, GS_A_4126, GS_A_4127, GS_A_4128, GS_A_4129, GS_A_4130, GS_A_4131, GS_A_4893, GS_A_4132, GS_A_4133, GS_A_4134,
GS_A_4135, GS_A_4136, GS_A_4137, GS_A_4138, GS_A_4139, GS_A_4805, GS_A_5200, GS_A_5248, GS_A_5249, GS_A_5250, GS_A_5251, VSDM_A_2802, VSDM_A_2912, VSDM_A_2914, VSDM_A_2913, VSDM_A_2915, VSDM_A_2916, VSDM_A_2917,
VSDM_A_2918, VSDM_A_2919, VSDM_A_2920, VSDM_A_2921, VSDM_A_2922, VSDM_A_2923, TIP1_A_2730, GS_A_4720)),
gemProdT_FD_VSDM_PTV1_5_0_1("gemProdT_FD_VSDM_PTV1.5.0-1",
asList(TIP1_A_4126, TIP1_A_4157, TIP1_A_5120, TIP1_A_5993, TIP1_A_5996, TIP1_A_5997, TIP1_A_5998, TIP1_A_6003, GS_A_4386, GS_A_4009, GS_A_4832, GS_A_4013, GS_A_4024, GS_A_4033, GS_A_4035, GS_A_4762, GS_A_4036, GS_A_4763, GS_A_4809, GS_A_3932, GS_A_3834, GS_A_3842, GS_A_3930, GS_A_3931,
GS_A_3839, GS_A_3832, GS_A_3833, GS_A_3840, GS_A_4817, GS_A_3933, GS_A_3934, GS_A_4819, GS_A_3937, GS_A_3695, GS_A_3696, GS_A_3702, GS_A_4543, GS_A_4545, GS_A_3856, GS_A_4547, GS_A_3801, GS_A_3796, GS_A_3816, GS_A_3804,
GS_A_3807, GS_A_3806, GS_A_3805, GS_A_5025, GS_A_5031, GS_A_5092, GS_A_5093, GS_A_5094, GS_A_5095, GS_A_4637, GS_A_4829, GS_A_4642, GS_A_4643, GS_A_4646, GS_A_4647, GS_A_4648, GS_A_4649, GS_A_4650, GS_A_4651, GS_A_4898,
GS_A_4899, GS_A_4652, GS_A_4653, GS_A_4654, GS_A_4655, GS_A_4656, GS_A_4657, GS_A_4660, GS_A_4749, GS_A_4661, GS_A_4662, GS_A_4663, GS_A_4751, GS_A_4957, GS_A_5077, GS_A_5215, GS_A_5336, VSDM_A_2226, VSDM_A_2241,
VSDM_A_2242, VSDM_A_2253, VSDM_A_2254, VSDM_A_2255, VSDM_A_2256, VSDM_A_2257, VSDM_A_2258, VSDM_A_2259, VSDM_A_2260, VSDM_A_2261, VSDM_A_2262, VSDM_A_2263, VSDM_A_2266, VSDM_A_2267, VSDM_A_2269, VSDM_A_2270, VSDM_A_2272,
VSDM_A_2273, VSDM_A_2274, VSDM_A_2279, VSDM_A_2313, VSDM_A_2902, VSDM_A_2952, VSDM_A_2955, VSDM_A_3001, VSDM_A_2297, VSDM_A_2299, VSDM_A_2301, VSDM_A_2302, VSDM_A_2305, VSDM_A_2314, VSDM_A_2315, VSDM_A_2316, VSDM_A_2317,
VSDM_A_2322, VSDM_A_2323, VSDM_A_2324, VSDM_A_2325, VSDM_A_2326, VSDM_A_2327, VSDM_A_2328, VSDM_A_2330, VSDM_A_2331, VSDM_A_2332, VSDM_A_2333, VSDM_A_2334, VSDM_A_2339, VSDM_A_2264, VSDM_A_2294, VSDM_A_2341, VSDM_A_2546,
VSDM_A_2961, VSDM_A_2973, VSDM_A_2974, VSDM_A_2975, VSDM_A_2976, VSDM_A_2994, VSDM_A_3009, VSDM_A_2751, VSDM_A_2329, VSDM_A_2280, VSDM_A_2281, VSDM_A_2283, VSDM_A_2286, VSDM_A_2287, VSDM_A_2288, VSDM_A_2290, VSDM_A_2291,
VSDM_A_2292, VSDM_A_2293, VSDM_A_2342, VSDM_A_2295),
asList(TIP1_A_4133, TIP1_A_4149, TIP1_A_2769, TIP1_A_2781, TIP1_A_2820, TIP1_A_3202, VSDM_A_2800, VSDM_A_2802, VSDM_A_2803, VSDM_A_2813, VSDM_A_2814, VSDM_A_2815, VSDM_A_2824, VSDM_A_2825, VSDM_A_2830, VSDM_A_2831, VSDM_A_2832,
VSDM_A_2846, VSDM_A_2853, VSDM_A_2854, VSDM_A_2858, VSDM_A_2862, VSDM_A_2958, GS_A_3824, GS_A_4018, GS_A_4027, GS_A_4759, GS_A_3839, GS_A_5089, GS_A_3695, GS_A_3696, GS_A_3697, GS_A_4541, GS_A_4831, GS_A_5054, GS_A_5038,
GS_A_5039, GS_A_5040, GS_A_5018, GS_A_5032, GS_A_5095, GS_A_4640, GS_A_5033, GS_A_3805, GS_A_3813, GS_A_3928, TIP1_A_4158, VSDM_A_2268, VSDM_A_2999, VSDM_A_2827, VSDM_A_2828, VSDM_A_2812, VSDM_A_2826, VSDM_A_2829,
TIP1_A_4932, GS_A_4330, GS_A_4473, GS_A_4474, GS_A_4475, GS_A_4479, GS_A_4523, GS_A_4524, GS_A_4528, GS_A_4529, GS_A_4384, GS_A_4385, GS_A_4386, GS_A_4387, GS_A_5035, GS_A_3839, GS_A_3841, GS_A_4808, GS_A_4641,
GS_A_4748, GS_A_4980, GS_A_4981, GS_A_4982, GS_A_4983, GS_A_4984, GS_A_3784, GS_A_3737, GS_A_3747, GS_A_3753, GS_A_3772, GS_A_3756, GS_A_3760, GS_A_2087, GS_A_2213, GS_A_2076, GS_A_2174, GS_A_2177, GS_A_2012, GS_A_2021,
GS_A_2046, GS_A_4944, GS_A_4945, GS_A_4946, GS_A_4947, GS_A_2047, GS_A_2309, GS_A_2326, GS_A_2328, GS_A_2329, GS_A_2330, GS_A_2331, GS_A_2332, GS_A_2345, GS_A_2347, GS_A_2356, GS_A_2361, GS_A_2363, GS_A_2366, GS_A_4378,
GS_A_4380, GS_A_4368, GS_A_4085, GS_A_4086, GS_A_4087, GS_A_4088, GS_A_4089, GS_A_4892, GS_A_4090, GS_A_4091, GS_A_4092, GS_A_4093, GS_A_4095, GS_A_4096, GS_A_4097, GS_A_4099, GS_A_4094, GS_A_4100, GS_A_4101, GS_A_4102,
GS_A_4103, GS_A_4397, GS_A_4106, GS_A_4108, GS_A_4109, GS_A_4414, GS_A_4398, GS_A_4399, GS_A_4400, GS_A_4401, GS_A_4402, GS_A_4415, GS_A_4418, GS_A_4416, GS_A_4417, GS_A_4419, GS_A_4420, GS_A_4421, GS_A_4422, GS_A_4423,
GS_A_4424, GS_A_4425, GS_A_4404, GS_A_4405, GS_A_4406, GS_A_4407, GS_A_4408, GS_A_4409, GS_A_4410, GS_A_4411, GS_A_4412, GS_A_4112, GS_A_4113, GS_A_4114, GS_A_4115, GS_A_4116, GS_A_4117, GS_A_4413, GS_A_4118, GS_A_4119,
GS_A_3876, GS_A_3877, GS_A_3878, GS_A_3879, GS_A_3880, GS_A_3881, GS_A_3883, GS_A_3884, GS_A_3885, GS_A_3886, GS_A_3887, GS_A_3888, GS_A_3889, GS_A_3890, GS_A_3891, GS_A_3892, GS_A_3893, GS_A_3894, GS_A_3895, GS_A_3896,
GS_A_3902, GS_A_3903, GS_A_3904, GS_A_3905, GS_A_3906, GS_A_3907, GS_A_4120, GS_A_3908, GS_A_3909, GS_A_3910, GS_A_3911, GS_A_3912, GS_A_3913, GS_A_3914, GS_A_3915, GS_A_3916, GS_A_3917, GS_A_3918, GS_A_3882, GS_A_3919,
GS_A_3920, GS_A_3921, GS_A_3922, GS_A_3923, GS_A_3924, GS_A_3925, GS_A_3958, GS_A_3959, GS_A_3960, GS_A_3961, GS_A_3962, GS_A_3963, GS_A_3964, GS_A_3965, GS_A_3966, GS_A_3967, GS_A_3968, GS_A_3969, GS_A_3970, GS_A_3971,
GS_A_3972, GS_A_3975, GS_A_3976, GS_A_3977, GS_A_3978, GS_A_3981, GS_A_3982, GS_A_3983, GS_A_3984, GS_A_3985, GS_A_3986, GS_A_3987, GS_A_3988, GS_A_3989, GS_A_3990, GS_A_3991, GS_A_3992, GS_A_3993, GS_A_3994, GS_A_4000,
GS_A_3995, GS_A_3996, GS_A_3997, GS_A_3998, GS_A_3999, GS_A_4121, GS_A_4122, GS_A_4123, GS_A_4124, GS_A_4125, GS_A_4126, GS_A_4127, GS_A_4128, GS_A_4129, GS_A_4130, GS_A_4131, GS_A_4893, GS_A_4132, GS_A_4133, GS_A_4134,
GS_A_4135, GS_A_4136, GS_A_4137, GS_A_4138, GS_A_4139, GS_A_4805, GS_A_5200, GS_A_5248, GS_A_5249, GS_A_5250, GS_A_5251, VSDM_A_2802, VSDM_A_2912, VSDM_A_2914, VSDM_A_2913, VSDM_A_2915, VSDM_A_2916, VSDM_A_2917,
VSDM_A_2918, VSDM_A_2919, VSDM_A_2920, VSDM_A_2921, VSDM_A_2922, VSDM_A_2923, TIP1_A_2730, GS_A_4720)),
gemProdT_X_509_TSP_nonQES_eGK_PTV1_5_0_V1_0_0("gemProdT_X.509_TSP_nonQES_eGK_PTV1.5.0",
asList(GS_A_4009, GS_A_4831, GS_A_4832, GS_A_4013, GS_A_4024, GS_A_4033, GS_A_4035, GS_A_4762, GS_A_3834, GS_A_3842, GS_A_4810, GS_A_3931, GS_A_3832, GS_A_3833, GS_A_3840, GS_A_4817, GS_A_3934, GS_A_3937, GS_A_3939, GS_A_3946,
GS_A_5038, GS_A_4146, GS_A_4147, GS_A_4148, GS_A_4149, GS_A_4145, GS_A_4159, GS_A_4160, GS_A_4721, GS_A_4669, GS_A_4673, GS_A_4675, GS_A_4677, GS_A_4678, GS_A_4679, GS_A_4684, GS_A_4686, GS_A_4687, GS_A_4688, GS_A_4690,
GS_A_4691, GS_A_4692, GS_A_5090, GS_A_4694, TIP1_A_3888),
asList(TIP1_A_4121, TIP1_A_4122, TIP1_A_4126, TIP1_A_4127, TIP1_A_4132, TIP1_A_4133, TIP1_A_4157, TIP1_A_4158, TIP1_A_2730, TIP1_A_5052, TIP1_A_2769, TIP1_A_2781, TIP1_A_2820, TIP1_A_3202, TIP1_A_3212, GS_A_4177, GS_A_4178,
GS_A_4179, GS_A_4180, GS_A_4181, GS_A_4182, GS_A_4183, GS_A_4185, GS_A_4208, GS_A_4906, GS_A_4395, GS_A_4213, GS_A_4348, GS_A_4219, GS_A_4225, GS_A_4228, GS_A_4229, GS_A_4234, GS_A_4237, GS_A_4250, GS_A_4251, GS_A_4283,
GS_A_4302, GS_A_4303, GS_A_4355, GS_A_4352, GS_A_4909, GS_A_4910, GS_A_4911, GS_A_4912, GS_A_4913, GS_A_4914, GS_A_4915, GS_A_4916, GS_A_4917, GS_A_4919, GS_A_4923, GS_A_4924, GS_A_4926, GS_A_4927, GS_A_4928, GS_A_4929,
GS_A_4931, GS_A_4933, GS_A_4831, GS_A_4010, GS_A_4011, GS_A_4012, GS_A_4759, GS_A_4036, GS_A_4763, GS_A_3824, GS_A_3932, GS_A_4810, GS_A_3931, GS_A_4820, GS_A_4442, GS_A_4444, GS_A_4445, GS_A_3695, GS_A_3696, GS_A_3697,
GS_A_4541, GS_A_5025, GS_A_5039, GS_A_3702, GS_A_4543, GS_A_4545, GS_A_3813, GS_A_3804, GS_A_3807, GS_A_3805, GS_A_3806, GS_A_5033, GS_A_3055, GS_A_3058, GS_A_4705, GS_A_4706, GS_A_4572, GS_A_4573, GS_A_4575, GS_A_4576,
GS_A_4577, GS_A_4714, GS_A_4715, GS_A_4716, GS_A_4717, GS_A_4718, GS_A_4719, GS_A_4722, GS_A_4723, GS_A_4724, GS_A_4588, GS_A_4590, GS_A_4595, GS_A_4596, GS_A_4598, GS_A_4599, GS_A_4730, GS_A_4731, GS_A_4732, GS_A_4735,
GS_A_4737, GS_A_4902, GS_A_4738, GS_A_4739, GS_A_4740, GS_A_4741, GS_A_4935, GS_A_4936, GS_A_4685, GS_A_4689, TIP1_A_3555, TIP1_A_3558, TIP1_A_3574, TIP1_A_3575, TIP1_A_3576, TIP1_A_3577, TIP1_A_3591, TIP1_A_3886,
TIP1_A_3594, TIP1_A_3639, TIP1_A_3640, TIP1_A_4932, GS_A_4173, GS_A_4191, GS_A_4230, GS_A_4396, GS_A_4243, GS_A_4247, GS_A_4249, GS_A_4255, GS_A_4259, GS_A_4260, GS_A_4261, GS_A_4268, GS_A_4270, GS_A_4271, GS_A_4272,
GS_A_4273, GS_A_4274, GS_A_4275, GS_A_4276, GS_A_4279, GS_A_4284, GS_A_4285, GS_A_4287, GS_A_4288, GS_A_4289, GS_A_4290, GS_A_4291, GS_A_4292, GS_A_4294, GS_A_4295, GS_A_4304, GS_A_4305, GS_A_4306, GS_A_4307, GS_A_4308,
GS_A_4309, GS_A_4310, GS_A_4311, GS_A_4312, GS_A_4313, GS_A_4314, GS_A_4315, GS_A_4316, GS_A_4317, GS_A_4925, GS_A_4473, GS_A_4474, GS_A_4475, GS_A_4479, GS_A_4523, GS_A_4524, GS_A_4357, GS_A_4361, GS_A_4362, GS_A_4367,
GS_A_4368, GS_A_4388, GS_A_4393, GS_A_5131, GS_A_5079, GS_A_4817, GS_A_4574, GS_A_4980, GS_A_4981, GS_A_4982, GS_A_4983, GS_A_4984, GS_A_3784, GS_A_3737, GS_A_3747, GS_A_3753, GS_A_3772, GS_A_3756, GS_A_3760, GS_A_2087,
GS_A_2213, GS_A_2076, GS_A_2174, GS_A_2177, GS_A_2012, GS_A_2021, GS_A_2046, GS_A_4944, GS_A_4945, GS_A_4946, GS_A_4947, GS_A_2047, GS_A_2309, GS_A_2326, GS_A_2328, GS_A_2329, GS_A_2330, GS_A_2331, GS_A_2332, GS_A_2345,
GS_A_2347, GS_A_2356, GS_A_2361, GS_A_2363, GS_A_2366, TIP1_A_5087, TIP1_A_4230, TIP1_A_4231, TIP1_A_4232, TIP1_A_4234, TIP1_A_4235, TIP1_A_3660, TIP1_A_3548, TIP1_A_3549, TIP1_A_3550, TIP1_A_3881, TIP1_A_3554,
TIP1_A_3555, TIP1_A_3557, TIP1_A_3590, TIP1_A_3595, TIP1_A_3596, GS_A_4085, GS_A_4086, GS_A_4087, GS_A_4088, GS_A_4089, GS_A_4892, GS_A_4090, GS_A_4091, GS_A_4092, GS_A_4093, GS_A_4095, GS_A_4096, GS_A_4097, GS_A_4099,
GS_A_4094, GS_A_4855, GS_A_5200, GS_A_5248, GS_A_5249, GS_A_4100, GS_A_4101, GS_A_4102, GS_A_4103, GS_A_4397, GS_A_4106, GS_A_4108, GS_A_4109, GS_A_4414, GS_A_4398, GS_A_4399, GS_A_4400, GS_A_4401, GS_A_4402, GS_A_4415,
GS_A_4418, GS_A_4416, GS_A_4417, GS_A_4419, GS_A_4420, GS_A_4421, GS_A_4422, GS_A_4423, GS_A_4424, GS_A_4425, GS_A_4404, GS_A_4405, GS_A_4406, GS_A_4407, GS_A_4408, GS_A_4409, GS_A_4410, GS_A_4411, GS_A_4412, GS_A_4112,
GS_A_4113, GS_A_4114, GS_A_4115, GS_A_4116, GS_A_4117, GS_A_4413, GS_A_4118, GS_A_4119, GS_A_3876, GS_A_3877, GS_A_3878, GS_A_3879, GS_A_3880, GS_A_3881, GS_A_3883, GS_A_3884, GS_A_3885, GS_A_3886, GS_A_3887, GS_A_3888,
GS_A_3889, GS_A_3890, GS_A_3891, GS_A_3892, GS_A_3893, GS_A_3894, GS_A_3895, GS_A_3896, GS_A_3902, GS_A_3903, GS_A_3904, GS_A_3905, GS_A_3906, GS_A_3907, GS_A_4120, GS_A_3908, GS_A_5250, GS_A_3909, GS_A_3910, GS_A_3911,
GS_A_3912, GS_A_3913, GS_A_3914, GS_A_3915, GS_A_3916, GS_A_3917, GS_A_3918, GS_A_3882, GS_A_3919, GS_A_3920, GS_A_3921, GS_A_3922, GS_A_3923, GS_A_3924, GS_A_3925, GS_A_3958, GS_A_3959, GS_A_3960, GS_A_3961, GS_A_3962,
GS_A_3963, GS_A_3964, GS_A_3965, GS_A_3966, GS_A_3967, GS_A_3968, GS_A_3969, GS_A_3970, GS_A_3971, GS_A_3972, GS_A_3975, GS_A_3976, GS_A_3977, GS_A_3978, GS_A_3981, GS_A_3982, GS_A_3983, GS_A_3984, GS_A_3985, GS_A_3986,
GS_A_3987, GS_A_3988, GS_A_3989, GS_A_3990, GS_A_3991, GS_A_3992, GS_A_3993, GS_A_3994, GS_A_4000, GS_A_3995, GS_A_3996, GS_A_3997, GS_A_3998, GS_A_3999, GS_A_5251, GS_A_4121, GS_A_4122, GS_A_4123, GS_A_4124, GS_A_4125,
GS_A_4126, GS_A_4127, GS_A_4128, GS_A_4129, GS_A_4130, GS_A_4131, GS_A_4893, GS_A_4132, GS_A_4133, GS_A_4134, GS_A_4135, GS_A_4136, GS_A_4137, GS_A_4138, GS_A_4139, TIP1_A_5060, TIP1_A_5061, TIP1_A_5062, TIP1_A_4911,
TIP1_A_4912, TIP1_A_4913, TIP1_A_4914, TIP1_A_4915, TIP1_A_4916, TIP1_A_4918, TIP1_A_4919, TIP1_A_4920, TIP1_A_4149, TIP1_A_2805, GS_A_4173, GS_A_4174, GS_A_4175, GS_A_4176, GS_A_4186, GS_A_4187, GS_A_4188, GS_A_4189,
GS_A_4190, GS_A_4192, GS_A_4195, GS_A_4199, GS_A_4201, GS_A_4202, GS_A_5083, GS_A_4203, GS_A_4205, GS_A_4207, GS_A_4209, GS_A_4394, GS_A_4210, GS_A_4211, GS_A_4212, GS_A_4214, GS_A_4215, GS_A_4216, GS_A_4217, GS_A_4218,
GS_A_4221, GS_A_4349, GS_A_4226, GS_A_4227, GS_A_4230, GS_A_4231, GS_A_4234, GS_A_4235, GS_A_4236, GS_A_4238, GS_A_4241, GS_A_4242, GS_A_4245, GS_A_4248, GS_A_4250, GS_A_4252, GS_A_4254, GS_A_4256, GS_A_4257, GS_A_4262,
GS_A_5084, GS_A_4263, GS_A_4264, GS_A_4265, GS_A_4266, GS_A_4267, GS_A_4269, GS_A_4276, GS_A_4277, GS_A_4278, GS_A_4281, GS_A_4282, GS_A_4296, GS_A_4297, GS_A_4299, GS_A_4300, GS_A_4318, GS_A_4319, GS_A_4321, GS_A_4322,
GS_A_4323, GS_A_4324, GS_A_4325, GS_A_4326, GS_A_4327, GS_A_4328, GS_A_4332, GS_A_4908, GS_A_4925, GS_A_4927, GS_A_4930, GS_A_4018, GS_A_4027, GS_A_4805, GS_A_5018, GS_A_4149, GS_A_4155, GS_A_5028, GS_A_4697, GS_A_4704,
GS_A_4727, GS_A_4670, GS_A_4680, TIP1_A_3547, TIP1_A_3877, TIP1_A_3880, TIP1_A_4427, TIP1_A_4428, TIP1_A_3630, TIP1_A_5376, TIP1_A_3883, TIP1_A_3558, TIP1_A_3564, TIP1_A_3565, TIP1_A_3567, TIP1_A_3569, TIP1_A_3580,
TIP1_A_3581, TIP1_A_3884, TIP1_A_3592, TIP1_A_3887, TIP1_A_3596, TIP1_A_3631, TIP1_A_3632, TIP1_A_3633, TIP1_A_3634, TIP1_A_3635, TIP1_A_3637, TIP1_A_3638, TIP1_A_3642)),
gemProdT_X_509_TSP_nonQES_eGK_PTV1_6_0_V1_2_1("gemProdT_X.509_TSP_nonQES_eGK_PTV1.6.0",
            asList(TIP1_A_5993, TIP1_A_5996, TIP1_A_5997, TIP1_A_5998, TIP1_A_6002, GS_A_4009, GS_A_4831, GS_A_4832, GS_A_4013, GS_A_4024, GS_A_4033, GS_A_4035, GS_A_4762, GS_A_3834, GS_A_3842, GS_A_4810, GS_A_3931, GS_A_3832, GS_A_3833, GS_A_3840, GS_A_4817, GS_A_3934, GS_A_3937, GS_A_3939, GS_A_3946,
GS_A_5038, GS_A_4146, GS_A_4147, GS_A_4148, GS_A_4149, GS_A_4145, GS_A_4159, GS_A_4160, GS_A_4721, GS_A_4669, GS_A_4673, GS_A_4675, GS_A_4677, GS_A_4678, GS_A_4679, GS_A_4684, GS_A_4686, GS_A_4687, GS_A_4688, GS_A_4690,
GS_A_4691, GS_A_4692, GS_A_5077, GS_A_5090, GS_A_4694, TIP1_A_3888),
asList(TIP1_A_4121, TIP1_A_4122, TIP1_A_4126, TIP1_A_4127, TIP1_A_4132, TIP1_A_4133, TIP1_A_4157, TIP1_A_4158, TIP1_A_2730, TIP1_A_5052, TIP1_A_2769, TIP1_A_2781, TIP1_A_2820, TIP1_A_3202, TIP1_A_3212, GS_A_4177, GS_A_4178,
GS_A_4179, GS_A_4180, GS_A_4181, GS_A_4182, GS_A_4183, GS_A_4185, GS_A_4208, GS_A_4906, GS_A_4395, GS_A_4213, GS_A_4348, GS_A_4219, GS_A_4225, GS_A_4228, GS_A_4229, GS_A_4234, GS_A_4237, GS_A_4250, GS_A_4251, GS_A_4283,
GS_A_4302, GS_A_4303, GS_A_4355, GS_A_4352, GS_A_4909, GS_A_4910, GS_A_4911, GS_A_4912, GS_A_4913, GS_A_4914, GS_A_4915, GS_A_4916, GS_A_4917, GS_A_4919, GS_A_4923, GS_A_4924, GS_A_4926, GS_A_4927, GS_A_4928, GS_A_4929,
GS_A_4931, GS_A_4933, GS_A_4831, GS_A_4010, GS_A_4011, GS_A_4012, GS_A_4759, GS_A_4036, GS_A_4763, GS_A_3824, GS_A_3932, GS_A_4810, GS_A_3931, GS_A_4820, GS_A_4442, GS_A_4444, GS_A_4445, GS_A_3695, GS_A_3696, GS_A_3697,
GS_A_4541, GS_A_5025, GS_A_5039, GS_A_3702, GS_A_4543, GS_A_4545, GS_A_3813, GS_A_3804, GS_A_3807, GS_A_3805, GS_A_3806, GS_A_5033, GS_A_3055, GS_A_3058, GS_A_4705, GS_A_4706, GS_A_4572, GS_A_4573, GS_A_4575, GS_A_4576,
GS_A_4577, GS_A_4714, GS_A_4715, GS_A_4716, GS_A_4717, GS_A_4718, GS_A_4719, GS_A_4722, GS_A_4723, GS_A_4724, GS_A_4588, GS_A_4590, GS_A_4595, GS_A_4596, GS_A_4598, GS_A_4599, GS_A_4730, GS_A_4731, GS_A_4732, GS_A_4735,
GS_A_4737, GS_A_4902, GS_A_4738, GS_A_4739, GS_A_4740, GS_A_4741, GS_A_4935, GS_A_4936, GS_A_4685, GS_A_4689, TIP1_A_3555, TIP1_A_3558, TIP1_A_3574, TIP1_A_3575, TIP1_A_3576, TIP1_A_3577, TIP1_A_3591, TIP1_A_3886,
TIP1_A_3594, TIP1_A_3639, TIP1_A_3640, TIP1_A_4932, GS_A_4173, GS_A_4191, GS_A_4230, GS_A_4396, GS_A_4243, GS_A_4247, GS_A_4249, GS_A_4255, GS_A_4259, GS_A_4260, GS_A_4261, GS_A_4268, GS_A_4270, GS_A_4271, GS_A_4272,
GS_A_4273, GS_A_4274, GS_A_4275, GS_A_4276, GS_A_4279, GS_A_4284, GS_A_4285, GS_A_4287, GS_A_4288, GS_A_4289, GS_A_4290, GS_A_4291, GS_A_4292, GS_A_4294, GS_A_4295, GS_A_4304, GS_A_4305, GS_A_4306, GS_A_4307, GS_A_4308,
GS_A_4309, GS_A_4310, GS_A_4311, GS_A_4312, GS_A_4313, GS_A_4314, GS_A_4315, GS_A_4316, GS_A_4317, GS_A_4925, GS_A_4473, GS_A_4474, GS_A_4475, GS_A_4479, GS_A_4523, GS_A_4524, GS_A_4357, GS_A_4361, GS_A_4362, GS_A_4367,
GS_A_4368, GS_A_4388, GS_A_4393, GS_A_5131, GS_A_5079, GS_A_4817, GS_A_4574, GS_A_4980, GS_A_4981, GS_A_4982, GS_A_4983, GS_A_4984, GS_A_3784, GS_A_3737, GS_A_3747, GS_A_3753, GS_A_3772, GS_A_3756, GS_A_3760, GS_A_2087,
GS_A_2213, GS_A_2076, GS_A_2174, GS_A_2177, GS_A_2012, GS_A_2021, GS_A_2046, GS_A_4944, GS_A_4945, GS_A_4946, GS_A_4947, GS_A_2047, GS_A_2309, GS_A_2326, GS_A_2328, GS_A_2329, GS_A_2330, GS_A_2331, GS_A_2332, GS_A_2345,
GS_A_2347, GS_A_2356, GS_A_2361, GS_A_2363, GS_A_2366, TIP1_A_5087, TIP1_A_4230, TIP1_A_4231, TIP1_A_4232, TIP1_A_4234, TIP1_A_4235, TIP1_A_3660, TIP1_A_3548, TIP1_A_3549, TIP1_A_3550, TIP1_A_3881, TIP1_A_3554,
TIP1_A_3555, TIP1_A_3557, TIP1_A_3590, TIP1_A_3595, TIP1_A_3596, GS_A_4085, GS_A_4086, GS_A_4087, GS_A_4088, GS_A_4089, GS_A_4892, GS_A_4090, GS_A_4091, GS_A_4092, GS_A_4093, GS_A_4095, GS_A_4096, GS_A_4097, GS_A_4099,
GS_A_4094, GS_A_4855, GS_A_5200, GS_A_5248, GS_A_5249, GS_A_4100, GS_A_4101, GS_A_4102, GS_A_4103, GS_A_4397, GS_A_4106, GS_A_4108, GS_A_4109, GS_A_4414, GS_A_4398, GS_A_4399, GS_A_4400, GS_A_4401, GS_A_4402, GS_A_4415,
GS_A_4418, GS_A_4416, GS_A_4417, GS_A_4419, GS_A_4420, GS_A_4421, GS_A_4422, GS_A_4423, GS_A_4424, GS_A_4425, GS_A_4404, GS_A_4405, GS_A_4406, GS_A_4407, GS_A_4408, GS_A_4409, GS_A_4410, GS_A_4411, GS_A_4412, GS_A_4112,
GS_A_4113, GS_A_4114, GS_A_4115, GS_A_4116, GS_A_4117, GS_A_4413, GS_A_4118, GS_A_4119, GS_A_3876, GS_A_3877, GS_A_3878, GS_A_3879, GS_A_3880, GS_A_3881, GS_A_3883, GS_A_3884, GS_A_3885, GS_A_3886, GS_A_3887, GS_A_3888,
GS_A_3889, GS_A_3890, GS_A_3891, GS_A_3892, GS_A_3893, GS_A_3894, GS_A_3895, GS_A_3896, GS_A_3902, GS_A_3903, GS_A_3904, GS_A_3905, GS_A_3906, GS_A_3907, GS_A_4120, GS_A_3908, GS_A_5250, GS_A_3909, GS_A_3910, GS_A_3911,
GS_A_3912, GS_A_3913, GS_A_3914, GS_A_3915, GS_A_3916, GS_A_3917, GS_A_3918, GS_A_3882, GS_A_3919, GS_A_3920, GS_A_3921, GS_A_3922, GS_A_3923, GS_A_3924, GS_A_3925, GS_A_3958, GS_A_3959, GS_A_3960, GS_A_3961, GS_A_3962,
GS_A_3963, GS_A_3964, GS_A_3965, GS_A_3966, GS_A_3967, GS_A_3968, GS_A_3969, GS_A_3970, GS_A_3971, GS_A_3972, GS_A_3975, GS_A_3976, GS_A_3977, GS_A_3978, GS_A_3981, GS_A_3982, GS_A_3983, GS_A_3984, GS_A_3985, GS_A_3986,
GS_A_3987, GS_A_3988, GS_A_3989, GS_A_3990, GS_A_3991, GS_A_3992, GS_A_3993, GS_A_3994, GS_A_4000, GS_A_3995, GS_A_3996, GS_A_3997, GS_A_3998, GS_A_3999, GS_A_5251, GS_A_4121, GS_A_4122, GS_A_4123, GS_A_4124, GS_A_4125,
GS_A_4126, GS_A_4127, GS_A_4128, GS_A_4129, GS_A_4130, GS_A_4131, GS_A_4893, GS_A_4132, GS_A_4133, GS_A_4134, GS_A_4135, GS_A_4136, GS_A_4137, GS_A_4138, GS_A_4139, TIP1_A_5060, TIP1_A_5061, TIP1_A_5062, TIP1_A_4911,
TIP1_A_4912, TIP1_A_4913, TIP1_A_4914, TIP1_A_4915, TIP1_A_4916, TIP1_A_4918, TIP1_A_4919, TIP1_A_4920, TIP1_A_4149, TIP1_A_2805, GS_A_4173, GS_A_4174, GS_A_4175, GS_A_4176, GS_A_4186, GS_A_4187, GS_A_4188, GS_A_4189,
GS_A_4190, GS_A_4192, GS_A_4195, GS_A_4199, GS_A_4201, GS_A_4202, GS_A_5083, GS_A_4203, GS_A_4205, GS_A_4207, GS_A_4209, GS_A_4394, GS_A_4210, GS_A_4211, GS_A_4212, GS_A_4214, GS_A_4215, GS_A_4216, GS_A_4217, GS_A_4218,
GS_A_4221, GS_A_4349, GS_A_4226, GS_A_4227, GS_A_4230, GS_A_4231, GS_A_4234, GS_A_4235, GS_A_4236, GS_A_4238, GS_A_4241, GS_A_4242, GS_A_4245, GS_A_4248, GS_A_4250, GS_A_4252, GS_A_4254, GS_A_4256, GS_A_4257, GS_A_4262,
GS_A_5084, GS_A_4263, GS_A_4264, GS_A_4265, GS_A_4266, GS_A_4267, GS_A_4269, GS_A_4276, GS_A_4277, GS_A_4278, GS_A_4281, GS_A_4282, GS_A_4296, GS_A_4297, GS_A_4299, GS_A_4300, GS_A_4318, GS_A_4319, GS_A_4321, GS_A_4322,
GS_A_4323, GS_A_4324, GS_A_4325, GS_A_4326, GS_A_4327, GS_A_4328, GS_A_4332, GS_A_4908, GS_A_4925, GS_A_4927, GS_A_4930, GS_A_4018, GS_A_4027, GS_A_4805, GS_A_5018, GS_A_4149, GS_A_4155, GS_A_5028, GS_A_4697, GS_A_4704,
GS_A_4727, GS_A_4670, GS_A_4680, TIP1_A_3547, TIP1_A_3877, TIP1_A_3880, TIP1_A_4427, TIP1_A_4428, TIP1_A_3630, TIP1_A_5376, TIP1_A_3883, TIP1_A_3558, TIP1_A_3564, TIP1_A_3565, TIP1_A_3567, TIP1_A_3569, TIP1_A_3580,
TIP1_A_3581, TIP1_A_3884, TIP1_A_3592, TIP1_A_3887, TIP1_A_3596, TIP1_A_3631, TIP1_A_3632, TIP1_A_3633, TIP1_A_3634, TIP1_A_3635, TIP1_A_3637, TIP1_A_3638, TIP1_A_3642)),
gemProdT_Intermediaer_VSDM_PTV140_V100("gemProdT_Intermediär_VSDM_PTV1.4.0",
asList(TIP1_A_4126, TIP1_A_4157, VSDM_A_2348, VSDM_A_2349, VSDM_A_2350, VSDM_A_2351, VSDM_A_2353, VSDM_A_2356, VSDM_A_2357, VSDM_A_2358, VSDM_A_2359, VSDM_A_2547, VSDM_A_2548, VSDM_A_2549, VSDM_A_2550, VSDM_A_2673, VSDM_A_2704,
VSDM_A_2706, VSDM_A_2707, VSDM_A_2712, VSDM_A_2747, VSDM_A_2761, VSDM_A_2940, VSDM_A_3022, VSDM_A_3023, GS_A_4386, GS_A_4009, GS_A_4832, GS_A_4013, GS_A_4024, GS_A_4033, GS_A_4035, GS_A_4762, GS_A_4036, GS_A_4763,
GS_A_4809, GS_A_3932, GS_A_3834, GS_A_3842, GS_A_3930, GS_A_3931, GS_A_3839, GS_A_3832, GS_A_3833, GS_A_3840, GS_A_4817, GS_A_3933, GS_A_3934, GS_A_4819, GS_A_3937, GS_A_3695, GS_A_3696, GS_A_5025, GS_A_5038, GS_A_3702,
GS_A_4543, GS_A_4545, GS_A_3856, GS_A_4547, GS_A_3801, GS_A_3796, GS_A_3816, GS_A_3804, GS_A_3807, GS_A_3806, GS_A_4146, GS_A_4147, GS_A_4148, GS_A_4149, GS_A_5029, GS_A_4637, GS_A_4829, GS_A_4642, GS_A_4643, GS_A_4646,
GS_A_4647, GS_A_4648, GS_A_4649, GS_A_4650, GS_A_4651, GS_A_4898, GS_A_4899, GS_A_4652, GS_A_4653, GS_A_4654, GS_A_4655, GS_A_4656, GS_A_4657, GS_A_4660, GS_A_4749, GS_A_4661, GS_A_4662, GS_A_4663, GS_A_4751, GS_A_4957,
GS_A_5215, VSDM_A_2222, VSDM_A_2223, VSDM_A_2225, VSDM_A_2226, VSDM_A_2227, VSDM_A_2228, VSDM_A_2231, VSDM_A_2233, VSDM_A_2234, VSDM_A_2236, VSDM_A_2240, VSDM_A_2271, VSDM_A_2674, VSDM_A_2950, VSDM_A_3002, TIP1_A_5120),
asList(TIP1_A_4133, TIP1_A_4149, TIP1_A_2769, TIP1_A_2781, TIP1_A_2820, TIP1_A_3202, VSDM_A_2797, VSDM_A_2799, VSDM_A_2824, VSDM_A_2845, VSDM_A_2747, VSDM_A_2940, VSDM_A_2942, VSDM_A_2943, VSDM_A_3021, GS_A_4388, GS_A_4388,
GS_A_4831, GS_A_4018, GS_A_4027, GS_A_4759, GS_A_3824, GS_A_3928, GS_A_3839, GS_A_5089, GS_A_3695, GS_A_3696, GS_A_3697, GS_A_4541, GS_A_5054, GS_A_5039, GS_A_5040, GS_A_3813, GS_A_3805, GS_A_5033, GS_A_4149, GS_A_3055,
GS_A_5073, GS_A_5030, GS_A_4640, TIP1_A_4932, GS_A_4330, GS_A_4435, GS_A_4436, GS_A_4437, GS_A_4448, GS_A_4449, GS_A_4450, GS_A_4451, GS_A_4453, GS_A_4455, GS_A_4456, GS_A_4457, GS_A_4458, GS_A_4459, GS_A_4460,
GS_A_4461, GS_A_4462, GS_A_4463, GS_A_4464, GS_A_4465, GS_A_4466, GS_A_4467, GS_A_4468, GS_A_4470, GS_A_4471, GS_A_4472, GS_A_4473, GS_A_4474, GS_A_4475, GS_A_4476, GS_A_4477, GS_A_4478, GS_A_4479, GS_A_4480, GS_A_4481,
GS_A_4482, VSDM_A_2669, VSDM_A_2748, GS_A_4503, GS_A_4504, GS_A_4505, GS_A_4506, GS_A_4507, GS_A_4508, GS_A_4509, GS_A_4510, GS_A_4511, GS_A_4512, GS_A_4513, GS_A_4514, GS_A_4515, GS_A_4516, GS_A_4517, GS_A_4518,
GS_A_4519, GS_A_4520, GS_A_4521, GS_A_4522, GS_A_4523, GS_A_4524, GS_A_4525, GS_A_4526, GS_A_4527, GS_A_4528, GS_A_4529, GS_A_4530, GS_A_4531, GS_A_4532, GS_A_4533, GS_A_4534, GS_A_4535, GS_A_4537, GS_A_4538, GS_A_4539,
GS_A_4540, GS_A_4368, GS_A_4384, GS_A_4385, GS_A_4386, GS_A_4387, GS_A_5322, GS_A_5035, GS_A_4388, GS_A_3839, GS_A_3841, GS_A_4808, GS_A_4641, GS_A_4748, GS_A_4980, GS_A_4981, GS_A_4982, GS_A_4983, GS_A_4984, GS_A_3784,
GS_A_3737, GS_A_3747, GS_A_3753, GS_A_3772, GS_A_3756, GS_A_3760, GS_A_2214, GS_A_2065, GS_A_2087, GS_A_2213, GS_A_2076, GS_A_2174, GS_A_2177, GS_A_2012, GS_A_2021, GS_A_2046, GS_A_4944, GS_A_4945, GS_A_4946, GS_A_4947,
GS_A_2047, GS_A_2309, GS_A_2326, GS_A_2328, GS_A_2329, GS_A_2330, GS_A_5017, GS_A_2331, GS_A_2332, GS_A_2333, GS_A_2339, GS_A_2343, GS_A_2345, GS_A_2347, GS_A_2355, GS_A_2356, GS_A_2357, GS_A_2359, GS_A_2360, GS_A_2361,
GS_A_2362, GS_A_2363, GS_A_2366, GS_A_3078, GS_A_3125, GS_A_3130, GS_A_3139, GS_A_3141, GS_A_4085, GS_A_4086, GS_A_4087, GS_A_4088, GS_A_4089, GS_A_4892, GS_A_4090, GS_A_4091, GS_A_4092, GS_A_4093, GS_A_4095, GS_A_4096,
GS_A_4097, GS_A_4099, GS_A_4094, GS_A_4855, GS_A_5200, GS_A_5248, GS_A_5249, GS_A_4100, GS_A_4101, GS_A_4102, GS_A_4103, GS_A_4397, GS_A_4106, GS_A_4108, GS_A_4109, GS_A_4414, GS_A_4398, GS_A_4399, GS_A_4400, GS_A_4401,
GS_A_4402, GS_A_4415, GS_A_4418, GS_A_4416, GS_A_4417, GS_A_4419, GS_A_4420, GS_A_4421, GS_A_4422, GS_A_4423, GS_A_4424, GS_A_4425, GS_A_4404, GS_A_4405, GS_A_4406, GS_A_4407, GS_A_4408, GS_A_4409, GS_A_4410, GS_A_4411,
GS_A_4412, GS_A_4112, GS_A_4113, GS_A_4114, GS_A_4115, GS_A_4116, GS_A_4117, GS_A_4413, GS_A_4118, GS_A_4119, GS_A_3876, GS_A_3877, GS_A_3878, GS_A_3879, GS_A_3880, GS_A_3881, GS_A_3883, GS_A_3884, GS_A_3885, GS_A_3886,
GS_A_3887, GS_A_3888, GS_A_3889, GS_A_3890, GS_A_3891, GS_A_3892, GS_A_3893, GS_A_3894, GS_A_3895, GS_A_3896, GS_A_3902, GS_A_3903, GS_A_3904, GS_A_3905, GS_A_3906, GS_A_3907, GS_A_4120, GS_A_3908, GS_A_5250, GS_A_3909,
GS_A_3910, GS_A_3911, GS_A_3912, GS_A_3913, GS_A_3914, GS_A_3915, GS_A_3916, GS_A_3917, GS_A_3918, GS_A_3882, GS_A_3919, GS_A_3920, GS_A_3921, GS_A_3922, GS_A_3923, GS_A_3924, GS_A_3925, GS_A_3958, GS_A_3959, GS_A_3960,
GS_A_3961, GS_A_3962, GS_A_3963, GS_A_3964, GS_A_3965, GS_A_3966, GS_A_3967, GS_A_3968, GS_A_3969, GS_A_3970, GS_A_3971, GS_A_3972, GS_A_3975, GS_A_3976, GS_A_3977, GS_A_3978, GS_A_3981, GS_A_3982, GS_A_3983, GS_A_3984,
GS_A_3985, GS_A_3986, GS_A_3987, GS_A_3988, GS_A_3989, GS_A_3990, GS_A_3991, GS_A_3992, GS_A_3993, GS_A_3994, GS_A_4000, GS_A_3995, GS_A_3996, GS_A_3997, GS_A_3998, GS_A_3999, GS_A_5251, GS_A_4121, GS_A_4122, GS_A_4123,
GS_A_4124, GS_A_4125, GS_A_4126, GS_A_4127, GS_A_4128, GS_A_4129, GS_A_4130, GS_A_4131, GS_A_4893, GS_A_4132, GS_A_4133, GS_A_4134, GS_A_4135, GS_A_4136, GS_A_4137, GS_A_4138, GS_A_4139, GS_A_4805, TIP1_A_2730,
VSDM_A_3006, GS_A_4864, GS_A_4720, GS_A_2070, GS_A_2071, GS_A_2072, GS_A_2073, GS_A_2074, GS_A_2075, GS_A_2156)),
    @AfoHistory(documentVersion = "gemProdT_Intermediaer_VSDM_PTV150_V100", description = "From the OPB1 package - still in state of change and not yet released. Identical to gemProdT_Intermediaer_VSDM_PTV140_V100 plus AFO GS-A_3027") gemProdT_Intermediaer_VSDM_PTV150_V100(
"gemProdT_Intermediär_VSDM_PTV1.5.0",
asList(TIP1_A_4126, TIP1_A_4157, VSDM_A_2348, VSDM_A_2349, VSDM_A_2350, VSDM_A_2351, VSDM_A_2353, VSDM_A_2356, VSDM_A_2357, VSDM_A_2358, VSDM_A_2359, VSDM_A_2547, VSDM_A_2548, VSDM_A_2549, VSDM_A_2550, VSDM_A_2673, VSDM_A_2704,
VSDM_A_2706, VSDM_A_2707, VSDM_A_2712, VSDM_A_2747, VSDM_A_2761, VSDM_A_2940, VSDM_A_3022, VSDM_A_3023, GS_A_4386, GS_A_4009, GS_A_4832, GS_A_4013, GS_A_4024, GS_A_4033, GS_A_4035, GS_A_4762, GS_A_4036, GS_A_4763,
GS_A_4809, GS_A_3932, GS_A_3834, GS_A_3842, GS_A_3930, GS_A_3931, GS_A_3839, GS_A_3832, GS_A_3833, GS_A_3840, GS_A_4817, GS_A_3933, GS_A_3934, GS_A_4819, GS_A_3937, GS_A_3695, GS_A_3696, GS_A_5025, GS_A_5038, GS_A_3702,
GS_A_4543, GS_A_4545, GS_A_3856, GS_A_4547, GS_A_3801, GS_A_3796, GS_A_3816, GS_A_3804, GS_A_3807, GS_A_3806, GS_A_4146, GS_A_4147, GS_A_4148, GS_A_4149, GS_A_5029, GS_A_4637, GS_A_4829, GS_A_4642, GS_A_4643, GS_A_4646,
GS_A_4647, GS_A_4648, GS_A_4649, GS_A_4650, GS_A_4651, GS_A_4898, GS_A_4899, GS_A_4652, GS_A_4653, GS_A_4654, GS_A_4655, GS_A_4656, GS_A_4657, GS_A_4660, GS_A_4749, GS_A_4661, GS_A_4662, GS_A_4663, GS_A_4751, GS_A_4957,
GS_A_5077, GS_A_5131, GS_A_5215, GS_A_5336, VSDM_A_2222, VSDM_A_2223, VSDM_A_2225, VSDM_A_2226, VSDM_A_2227, VSDM_A_2228, VSDM_A_2231, VSDM_A_2233, VSDM_A_2234, VSDM_A_2236, VSDM_A_2240, VSDM_A_2271, VSDM_A_2674,
VSDM_A_2950, VSDM_A_3002, VSDM_A_3026, VSDM_A_3027, VSDM_A_3028, TIP1_A_5120),
asList(TIP1_A_4133, TIP1_A_4149, TIP1_A_2769, TIP1_A_2781, TIP1_A_2820, TIP1_A_3202, VSDM_A_2797, VSDM_A_2799, VSDM_A_2824, VSDM_A_2845, VSDM_A_2747, VSDM_A_2940, VSDM_A_2942, VSDM_A_2943, VSDM_A_3021, GS_A_4388, GS_A_4388,
GS_A_4831, GS_A_4018, GS_A_4027, GS_A_4759, GS_A_3824, GS_A_3928, GS_A_3839, GS_A_5089, GS_A_3695, GS_A_3696, GS_A_3697, GS_A_4541, GS_A_5054, GS_A_5039, GS_A_5040, GS_A_3813, GS_A_3805, GS_A_5033, GS_A_4149, GS_A_3055,
GS_A_5073, GS_A_5030, GS_A_4640, TIP1_A_4932, GS_A_4330, GS_A_4435, GS_A_4436, GS_A_4437, GS_A_4448, GS_A_4449, GS_A_4450, GS_A_4451, GS_A_4453, GS_A_4455, GS_A_4456, GS_A_4457, GS_A_4458, GS_A_4459, GS_A_4460,
GS_A_4461, GS_A_4462, GS_A_4463, GS_A_4464, GS_A_4465, GS_A_4466, GS_A_4467, GS_A_4468, GS_A_4470, GS_A_4471, GS_A_4472, GS_A_4473, GS_A_4474, GS_A_4475, GS_A_4476, GS_A_4477, GS_A_4478, GS_A_4479, GS_A_4480, GS_A_4481,
GS_A_4482, VSDM_A_2669, VSDM_A_2748, GS_A_4503, GS_A_4504, GS_A_4505, GS_A_4506, GS_A_4507, GS_A_4508, GS_A_4509, GS_A_4510, GS_A_4511, GS_A_4512, GS_A_4513, GS_A_4514, GS_A_4515, GS_A_4516, GS_A_4517, GS_A_4518,
GS_A_4519, GS_A_4520, GS_A_4521, GS_A_4522, GS_A_4523, GS_A_4524, GS_A_4525, GS_A_4526, GS_A_4527, GS_A_4528, GS_A_4529, GS_A_4530, GS_A_4531, GS_A_4532, GS_A_4533, GS_A_4534, GS_A_4535, GS_A_4537, GS_A_4538, GS_A_4539,
GS_A_4540, GS_A_4368, GS_A_4384, GS_A_4385, GS_A_4386, GS_A_4387, GS_A_5322, GS_A_5035, GS_A_4388, GS_A_3839, GS_A_3841, GS_A_4808, GS_A_4641, GS_A_4748, GS_A_4980, GS_A_4981, GS_A_4982, GS_A_4983, GS_A_4984, GS_A_3784,
GS_A_3737, GS_A_3747, GS_A_3753, GS_A_3772, GS_A_3756, GS_A_3760, GS_A_2214, GS_A_2065, GS_A_2087, GS_A_2213, GS_A_2076, GS_A_2174, GS_A_2177, GS_A_2012, GS_A_2021, GS_A_2046, GS_A_4944, GS_A_4945, GS_A_4946, GS_A_4947,
GS_A_2047, GS_A_2309, GS_A_2326, GS_A_2328, GS_A_2329, GS_A_2330, GS_A_5017, GS_A_2331, GS_A_2332, GS_A_2333, GS_A_2339, GS_A_2343, GS_A_2345, GS_A_2347, GS_A_2355, GS_A_2356, GS_A_2357, GS_A_2359, GS_A_2360, GS_A_2361,
GS_A_2362, GS_A_2363, GS_A_2366, GS_A_3078, GS_A_3125, GS_A_3130, GS_A_3139, GS_A_3141, GS_A_4085, GS_A_4086, GS_A_4087, GS_A_4088, GS_A_4089, GS_A_4892, GS_A_4090, GS_A_4091, GS_A_4092, GS_A_4093, GS_A_4095, GS_A_4096,
GS_A_4097, GS_A_4099, GS_A_4094, GS_A_4855, GS_A_5200, GS_A_5248, GS_A_5249, GS_A_4100, GS_A_4101, GS_A_4102, GS_A_4103, GS_A_4397, GS_A_4106, GS_A_4108, GS_A_4109, GS_A_4414, GS_A_4398, GS_A_4399, GS_A_4400, GS_A_4401,
GS_A_4402, GS_A_4415, GS_A_4418, GS_A_4416, GS_A_4417, GS_A_4419, GS_A_4420, GS_A_4421, GS_A_4422, GS_A_4423, GS_A_4424, GS_A_4425, GS_A_4404, GS_A_4405, GS_A_4406, GS_A_4407, GS_A_4408, GS_A_4409, GS_A_4410, GS_A_4411,
GS_A_4412, GS_A_4112, GS_A_4113, GS_A_4114, GS_A_4115, GS_A_4116, GS_A_4117, GS_A_4413, GS_A_4118, GS_A_4119, GS_A_3876, GS_A_3877, GS_A_3878, GS_A_3879, GS_A_3880, GS_A_3881, GS_A_3883, GS_A_3884, GS_A_3885, GS_A_3886,
GS_A_3887, GS_A_3888, GS_A_3889, GS_A_3890, GS_A_3891, GS_A_3892, GS_A_3893, GS_A_3894, GS_A_3895, GS_A_3896, GS_A_3902, GS_A_3903, GS_A_3904, GS_A_3905, GS_A_3906, GS_A_3907, GS_A_4120, GS_A_3908, GS_A_5250, GS_A_3909,
GS_A_3910, GS_A_3911, GS_A_3912, GS_A_3913, GS_A_3914, GS_A_3915, GS_A_3916, GS_A_3917, GS_A_3918, GS_A_3882, GS_A_3919, GS_A_3920, GS_A_3921, GS_A_3922, GS_A_3923, GS_A_3924, GS_A_3925, GS_A_3958, GS_A_3959, GS_A_3960,
GS_A_3961, GS_A_3962, GS_A_3963, GS_A_3964, GS_A_3965, GS_A_3966, GS_A_3967, GS_A_3968, GS_A_3969, GS_A_3970, GS_A_3971, GS_A_3972, GS_A_3975, GS_A_3976, GS_A_3977, GS_A_3978, GS_A_3981, GS_A_3982, GS_A_3983, GS_A_3984,
GS_A_3985, GS_A_3986, GS_A_3987, GS_A_3988, GS_A_3989, GS_A_3990, GS_A_3991, GS_A_3992, GS_A_3993, GS_A_3994, GS_A_4000, GS_A_3995, GS_A_3996, GS_A_3997, GS_A_3998, GS_A_3999, GS_A_5251, GS_A_4121, GS_A_4122, GS_A_4123,
GS_A_4124, GS_A_4125, GS_A_4126, GS_A_4127, GS_A_4128, GS_A_4129, GS_A_4130, GS_A_4131, GS_A_4893, GS_A_4132, GS_A_4133, GS_A_4134, GS_A_4135, GS_A_4136, GS_A_4137, GS_A_4138, GS_A_4139, GS_A_4805, TIP1_A_2730,
VSDM_A_3006, GS_A_4864, GS_A_4720, GS_A_2070, GS_A_2071, GS_A_2072, GS_A_2073, GS_A_2074, GS_A_2075, GS_A_2156)),
/**
     * Partial definition of all AFOs of [gemProdT_Kon_PTV2.6.0] required to execute tests targeting the LDAP-Proxy.
*/
gemProdT_Kon_PTV260_V100_LDAPProxy("gemProdT_Kon_PTV2.6.0 (LDAP-Proxy)", asList(GS_A_3695, GS_A_3804, GS_A_3856, GS_A_3931, GS_A_4386, TIP1_A_4514, TIP1_A_4515, TIP1_A_4518, TIP1_A_4689, TIP1_A_4693, TIP1_A_4696, TIP1_A_4812, TIP1_A_5401, TIP1_A_5516, TIP1_A_5517, TIP1_A_5518, TIP1_A_5519, TIP1_A_5520, TIP1_A_5521, TIP1_A_5568, TIP1_A_5570), asList(GS_A_3695, GS_A_3931, GS_A_4386, TIP1_A_4515, TIP1_A_4693, TIP1_A_4696, TIP1_A_4812)),
;
private PTStBs(final String reference, final List<AFO> testAFOs, final List<AFO> nonTestAFOs) {
assert nonNull(reference) : "There must be a non-null gematik reference identifier!";
assert !reference.isEmpty() : "There must be a non-empty gematik reference identifier!";
        assert nonNull(testAFOs) : "There must be a non-null list of (test-relevant) AFOs!";
        assert nonNull(nonTestAFOs) : "There must be a non-null list of (test-irrelevant) AFOs!";
this.reference = reference;
this.testAFOs = unmodifiableSet(new HashSet<>(testAFOs));
// assert disjoint(testAFOs, nonTestAFOs);
        nonTestAFOs.stream().filter(testAFOs::contains).forEach(afo -> System.err.format("Warning: %1$s lists AFO %2$s as both testable and non-testable - please check with the gematik why!%n", this.name(), afo));
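        // the complete AFO set is the union of the testable and the non-testable AFOs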
this.afos = unmodifiableSet(concat(testAFOs.stream(), nonTestAFOs.stream()).collect(toSet()));
}
private final String reference;
@Override
public String getReference() {
return this.reference;
}
private final Set<AFO> afos;
@Override
public Set<AFO> getAFOs() {
return this.afos;
}
private final Set<AFO> testAFOs;
@Override
public Set<AFO> getTestableAFOs() {
return this.testAFOs;
}
}
| TIP1_A_5993, TIP1_A_5996, TIP1_A_5997, TIP1_A_5998, TIP1_A_6002 for OCSP | src/main/java/de/ehex/foss/gematik/specifications/PTStBs.java | TIP1_A_5993, TIP1_A_5996, TIP1_A_5997, TIP1_A_5998, TIP1_A_6002 for OCSP | <ide><path>rc/main/java/de/ehex/foss/gematik/specifications/PTStBs.java
<ide> TIP1_A_3581, TIP1_A_3884, TIP1_A_3592, TIP1_A_3887, TIP1_A_3596, TIP1_A_3631, TIP1_A_3632, TIP1_A_3633, TIP1_A_3634, TIP1_A_3635, TIP1_A_3637, TIP1_A_3638, TIP1_A_3642)),
<ide>
<ide> gemProdT_X_509_TSP_nonQES_eGK_PTV1_6_0_V1_2_1("gemProdT_X.509_TSP_nonQES_eGK_PTV1.6.0",
<del> asList(GS_A_4009, GS_A_4831, GS_A_4832, GS_A_4013, GS_A_4024, GS_A_4033, GS_A_4035, GS_A_4762, GS_A_3834, GS_A_3842, GS_A_4810, GS_A_3931, GS_A_3832, GS_A_3833, GS_A_3840, GS_A_4817, GS_A_3934, GS_A_3937, GS_A_3939, GS_A_3946,
<add> asList(TIP1_A_5993, TIP1_A_5996, TIP1_A_5997, TIP1_A_5998, TIP1_A_6002, GS_A_4009, GS_A_4831, GS_A_4832, GS_A_4013, GS_A_4024, GS_A_4033, GS_A_4035, GS_A_4762, GS_A_3834, GS_A_3842, GS_A_4810, GS_A_3931, GS_A_3832, GS_A_3833, GS_A_3840, GS_A_4817, GS_A_3934, GS_A_3937, GS_A_3939, GS_A_3946,
<ide> GS_A_5038, GS_A_4146, GS_A_4147, GS_A_4148, GS_A_4149, GS_A_4145, GS_A_4159, GS_A_4160, GS_A_4721, GS_A_4669, GS_A_4673, GS_A_4675, GS_A_4677, GS_A_4678, GS_A_4679, GS_A_4684, GS_A_4686, GS_A_4687, GS_A_4688, GS_A_4690,
<ide> GS_A_4691, GS_A_4692, GS_A_5077, GS_A_5090, GS_A_4694, TIP1_A_3888),
<ide> asList(TIP1_A_4121, TIP1_A_4122, TIP1_A_4126, TIP1_A_4127, TIP1_A_4132, TIP1_A_4133, TIP1_A_4157, TIP1_A_4158, TIP1_A_2730, TIP1_A_5052, TIP1_A_2769, TIP1_A_2781, TIP1_A_2820, TIP1_A_3202, TIP1_A_3212, GS_A_4177, GS_A_4178, |
|
Java | apache-2.0 | 05bf9e7fa9344b0819a553b341012112c874d196 | 0 | dbaelz/Secludedness,dbaelz/Secludedness | package de.dbaelz.secludedness.screen;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Application.ApplicationType;
import com.badlogic.gdx.Input.Peripheral;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.BitmapFont;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.graphics.g2d.TextureRegion;
import com.badlogic.gdx.maps.tiled.renderers.OrthogonalTiledMapRenderer;
import de.dbaelz.secludedness.MainGame;
import de.dbaelz.secludedness.level.Level;
import de.dbaelz.secludedness.level.Player;
import de.dbaelz.secludedness.manager.InputManager;
public class LevelScreen extends AbstractScreen {
private MainGame mGame;
private Level mLevel;
private Player mPlayer;
private InputManager mInputManager;
private Texture mTexture;
private TextureRegion mPlayerTexture;
private BitmapFont mFont;
private SpriteBatch mBatch = new SpriteBatch();
private OrthographicCamera mCamera;
private OrthogonalTiledMapRenderer mMapRenderer;
private boolean usePolling;
public LevelScreen(MainGame game, String mapName) {
super(game);
mGame = game;
mLevel = new Level(mapName);
}
@Override
public boolean isLevelScreen() {
return true;
}
@Override
public void render(float delta) {
doGameLogic(delta);
super.render(delta);
mMapRenderer.setView(mCamera);
mMapRenderer.render();
mBatch.begin();
mBatch.draw(mPlayerTexture, mPlayer.getPositionX(), mPlayer.getPositionY());
// TODO: Debug, remove with nice hud
mFont.draw(mBatch, "HEALTH: " + mPlayer.getHealth(), 50, 50);
mBatch.end();
}
@Override
public void show() {
super.show();
mPlayer = new Player(mLevel.getPlayerCellX(), mLevel.getPlayerCellY(), mLevel.getPlayerStartHealth());
mTexture = new Texture(Gdx.files.internal("textures/texture.png"));
mPlayerTexture = new TextureRegion(mTexture, 64, 0, 64, 64);
mFont = new BitmapFont();
mFont.setColor(1.0f, 0.5f, 1.0f, 1.0f);
// TODO: Change input based on settings
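        // use accelerometer polling on Android when available; otherwise rely on
        // event-driven input delivered through the InputManager registered below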
if ((Gdx.app.getType() == ApplicationType.Android) && (Gdx.input.isPeripheralAvailable(Peripheral.Accelerometer))) {
usePolling = true;
} else {
usePolling = false;
}
mInputManager = new InputManager(mLevel, mPlayer);
Gdx.input.setInputProcessor(mInputManager);
}
@Override
public void resize(int width, int height) {
mMapRenderer = new OrthogonalTiledMapRenderer(mLevel.getMap(), 1.0f);
mCamera = new OrthographicCamera();
mCamera.setToOrtho(false, width, height);
mCamera.update();
}
@Override
public void dispose() {
mMapRenderer.dispose();
mTexture.dispose();
mFont.dispose();
super.dispose();
}
private void doGameLogic(float delta){
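        // poll-based input (e.g. the accelerometer) has no events and must be sampled every frame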
if (usePolling) {
mInputManager.pollPlayerInput(delta, mLevel, mPlayer);
}
if (mPlayer.getHealth() == 0) {
// TODO: GAME OVER!
}
}
}
| Secludedness/src/main/java/de/dbaelz/secludedness/screen/LevelScreen.java | package de.dbaelz.secludedness.screen;
import com.badlogic.gdx.Gdx;
import de.dbaelz.secludedness.MainGame;
import de.dbaelz.secludedness.level.Level;
public class LevelScreen extends AbstractScreen {
private MainGame mGame;
private Level mLevel;
public LevelScreen(MainGame game, String mapName) {
super(game);
mGame = game;
mLevel = new Level(mapName);
Gdx.app.log("Test", "levelscreen");
}
}
| Level play and controllable, basic gaming features working
| Secludedness/src/main/java/de/dbaelz/secludedness/screen/LevelScreen.java | Level play and controllable, basic gaming features working | <ide><path>ecludedness/src/main/java/de/dbaelz/secludedness/screen/LevelScreen.java
<ide> package de.dbaelz.secludedness.screen;
<ide>
<ide> import com.badlogic.gdx.Gdx;
<add>import com.badlogic.gdx.Application.ApplicationType;
<add>import com.badlogic.gdx.Input.Peripheral;
<add>import com.badlogic.gdx.graphics.OrthographicCamera;
<add>import com.badlogic.gdx.graphics.Texture;
<add>import com.badlogic.gdx.graphics.g2d.BitmapFont;
<add>import com.badlogic.gdx.graphics.g2d.SpriteBatch;
<add>import com.badlogic.gdx.graphics.g2d.TextureRegion;
<add>import com.badlogic.gdx.maps.tiled.renderers.OrthogonalTiledMapRenderer;
<ide>
<ide> import de.dbaelz.secludedness.MainGame;
<ide> import de.dbaelz.secludedness.level.Level;
<add>import de.dbaelz.secludedness.level.Player;
<add>import de.dbaelz.secludedness.manager.InputManager;
<ide>
<ide> public class LevelScreen extends AbstractScreen {
<ide> private MainGame mGame;
<ide> private Level mLevel;
<add> private Player mPlayer;
<add> private InputManager mInputManager;
<add> private Texture mTexture;
<add> private TextureRegion mPlayerTexture;
<add> private BitmapFont mFont;
<add>
<add>
<add> private SpriteBatch mBatch = new SpriteBatch();
<add> private OrthographicCamera mCamera;
<add> private OrthogonalTiledMapRenderer mMapRenderer;
<add>
<add> private boolean usePolling;
<ide>
<ide> public LevelScreen(MainGame game, String mapName) {
<ide> super(game);
<ide> mGame = game;
<ide> mLevel = new Level(mapName);
<del> Gdx.app.log("Test", "levelscreen");
<add> }
<add>
<add> @Override
<add> public boolean isLevelScreen() {
<add> return true;
<add> }
<add>
<add> @Override
<add> public void render(float delta) {
<add> doGameLogic(delta);
<add>
<add> super.render(delta);
<add>
<add> mMapRenderer.setView(mCamera);
<add> mMapRenderer.render();
<add>
<add> mBatch.begin();
<add> mBatch.draw(mPlayerTexture, mPlayer.getPositionX(), mPlayer.getPositionY());
<add> // TODO: Debug, remove with nice hud
<add> mFont.draw(mBatch, "HEALTH: " + mPlayer.getHealth(), 50, 50);
<add> mBatch.end();
<add> }
<add>
<add> @Override
<add> public void show() {
<add> super.show();
<add>
<add> mPlayer = new Player(mLevel.getPlayerCellX(), mLevel.getPlayerCellY(), mLevel.getPlayerStartHealth());
<add> mTexture = new Texture(Gdx.files.internal("textures/texture.png"));
<add> mPlayerTexture = new TextureRegion(mTexture, 64, 0, 64, 64);
<add>
<add> mFont = new BitmapFont();
<add> mFont.setColor(1.0f, 0.5f, 1.0f, 1.0f);
<add>
<add> // TODO: Change input based on settings
<add> if ((Gdx.app.getType() == ApplicationType.Android) && (Gdx.input.isPeripheralAvailable(Peripheral.Accelerometer))) {
<add> usePolling = true;
<add> } else {
<add> usePolling = false;
<add> }
<add> mInputManager = new InputManager(mLevel, mPlayer);
<add> Gdx.input.setInputProcessor(mInputManager);
<add> }
<add>
<add> @Override
<add> public void resize(int width, int height) {
<add> mMapRenderer = new OrthogonalTiledMapRenderer(mLevel.getMap(), 1.0f);
<add> mCamera = new OrthographicCamera();
<add> mCamera.setToOrtho(false, width, height);
<add> mCamera.update();
<add> }
<add>
<add> @Override
<add> public void dispose() {
<add> mMapRenderer.dispose();
<add> mTexture.dispose();
<add> mFont.dispose();
<add> super.dispose();
<add> }
<add>
<add> private void doGameLogic(float delta){
<add> if (usePolling) {
<add> mInputManager.pollPlayerInput(delta, mLevel, mPlayer);
<add> }
<add>
<add>
<add> if (mPlayer.getHealth() == 0) {
<add> // TODO: GAME OVER!
<add> }
<ide> }
<ide> } |
|
Java | mit | 1c2fd095d1806b0a486dcca6ffb5c06e7cd349fa | 0 | FTC7393/state-machine-framework,FTC7393/state-machine-framework | package ftc.electronvolts.util.files;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.math.RoundingMode;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import ftc.electronvolts.util.InputExtractor;
/**
* This file was made by the electronVolts, FTC team 7393
*
* A logger that takes a list of Columns, which have a header and an
* InputExtractor, and logs each of them to a column in a file
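 *
 * A minimal usage sketch (the {@code dir} File and InputExtractor instances
 * such as {@code headingInput} are hypothetical placeholders, not part of
 * this class):
 *
 * <pre>{@code
 * List<Logger.Column> columns = Arrays.asList(
 *         new Logger.Column("heading", headingInput),
 *         new Logger.Column("power", powerInput));
 * Logger logger = new Logger("log_", ".csv", columns);
 * if (logger.start(dir)) { // creates a timestamped file and writes the title row
 *     logger.act();        // call once per loop iteration to append a data row
 *     logger.stop();       // close the file when logging is finished
 * }
 * }</pre>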
*/
public class Logger {
public static class Column {
private final String header;
private final InputExtractor<?> input;
public Column(String header, InputExtractor<?> input) {
this.header = header;
this.input = input;
}
}
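    // formats the elapsed-time column to at most five decimal places;
    // the static block below sets HALF_UP rounding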
private static final DecimalFormat df = new DecimalFormat("#.#####");
static {
df.setRoundingMode(RoundingMode.HALF_UP);
}
private static SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy_MM_dd_HH_mm_ss");
private long logStart;
private PrintStream fileStream;
private final String beforeTimestamp, afterTimestamp;
private String titles;
private String fullFileName;
private final List<Column> columns;
/**
* @param beforeTimestamp the text to put before the timestamp in the filename
* @param afterTimestamp the text to put after the timestamp in the filename
* @param columns the columns that will be written to the file
*/
public Logger(String beforeTimestamp, String afterTimestamp, List<Column> columns) {
this.beforeTimestamp = beforeTimestamp;
this.afterTimestamp = afterTimestamp;
StringBuilder sb = new StringBuilder("time");
for (Column column : columns) {
sb.append(",").append(column.header);
}
titles = sb.append("\n").toString();
this.columns = columns;
}
/**
* write the column titles to the file
*/
public boolean start(File dir) {
logStart = System.nanoTime();
long millis = System.currentTimeMillis();
Date now = new Date(millis);
String date = dateFormat.format(now);
fullFileName = beforeTimestamp + date + afterTimestamp;
File file = new File(dir, fullFileName);
try {
fileStream = new PrintStream(new FileOutputStream(file));
            fileStream.print(titles); // print, not printf: a literal '%' in a title must not be parsed as a format specifier
return true;
} catch (IOException e) {
return false;
}
}
/**
* write the input columns to the file
*/
public void act() {
if (fileStream != null) {
long now = System.nanoTime();
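            // elapsed time since start(), converted from nanoseconds to milliseconds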
StringBuilder line = new StringBuilder(df.format(1e-6 * (now - logStart)));
for (Column column : columns) {
line.append(",").append(column.input.getValue());
}
fileStream.printf(line.append("\n").toString());
}
}
/**
* close the file
*/
public void stop() {
if (fileStream != null) {
fileStream.close();
}
}
public String getFileName() {
return fullFileName;
}
} | src/ftc/electronvolts/util/files/Logger.java | package ftc.electronvolts.util.files;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.math.RoundingMode;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import ftc.electronvolts.util.InputExtractor;
/**
* This file was made by the electronVolts, FTC team 7393
*
* A logger that takes a list of Columns, which have a header and an
* InputExtractor, and logs each of them to a column in a file
*/
public class Logger {
public static class Column {
private final String header;
private final InputExtractor<?> input;
public Column(String header, InputExtractor<?> input) {
this.header = header;
this.input = input;
}
}
private static final DecimalFormat df = new DecimalFormat("#.#####");
static {
df.setRoundingMode(RoundingMode.HALF_UP);
}
private static SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy_MM_dd_HH_mm_ss");
private long logStart;
private PrintStream fileStream;
private final String beforeTimestamp, afterTimestamp;
private String titles;
private String fullFileName;
private final List<Column> columns;
/**
* @param beforeTimestamp the text to put before the timestamp in the filename
* @param afterTimestamp the text to put after the timestamp in the filename
* @param columns the columns that will be written to the file
*/
public Logger(String beforeTimestamp, String afterTimestamp, List<Column> columns) {
this.beforeTimestamp = beforeTimestamp;
this.afterTimestamp = afterTimestamp;
StringBuilder sb = new StringBuilder("time");
for (Column column : columns) {
sb.append("\t").append(column.header);
}
titles = sb.append("\n").toString();
this.columns = columns;
}
/**
* write the column titles to the file
*/
public boolean start(File dir) {
logStart = System.nanoTime();
long millis = System.currentTimeMillis();
Date now = new Date(millis);
String date = dateFormat.format(now);
fullFileName = beforeTimestamp + date + afterTimestamp;
File file = new File(dir, fullFileName);
try {
fileStream = new PrintStream(new FileOutputStream(file));
fileStream.printf(titles);
return true;
} catch (IOException e) {
return false;
}
}
/**
* write the input columns to the file
*/
public void act() {
if (fileStream != null) {
long now = System.nanoTime();
StringBuilder line = new StringBuilder(df.format(1e-6 * (now - logStart)));
for (Column column : columns) {
line.append(",").append(column.input.getValue());
}
fileStream.printf(line.append("\n").toString());
}
}
/**
* close the file
*/
public void stop() {
if (fileStream != null) {
fileStream.close();
}
}
public String getFileName() {
return fullFileName;
}
} | added commas to column header row | src/ftc/electronvolts/util/files/Logger.java | added commas to column header row | <ide><path>rc/ftc/electronvolts/util/files/Logger.java
<ide> this.afterTimestamp = afterTimestamp;
<ide> StringBuilder sb = new StringBuilder("time");
<ide> for (Column column : columns) {
<del> sb.append("\t").append(column.header);
<add> sb.append(",").append(column.header);
<ide> }
<ide> titles = sb.append("\n").toString();
<ide> this.columns = columns; |
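The one-line change above switches the header separator from a tab to a comma, so the header row now matches the comma-separated data rows that act() already writes and the output is well-formed CSV. A minimal usage sketch, assuming InputExtractor declares a single getValue() method so a lambda suffices (the column names and values are illustrative):

import java.io.File;
import java.util.Arrays;
import ftc.electronvolts.util.InputExtractor;
import ftc.electronvolts.util.files.Logger;

public class LoggerDemo {
    public static void main(String[] args) {
        InputExtractor<Double> battery = () -> 12.6;  // stand-in for a real sensor read
        InputExtractor<Integer> encoder = () -> 1024;
        Logger log = new Logger("log_", ".csv", Arrays.asList(
                new Logger.Column("battery", battery),
                new Logger.Column("encoder", encoder)));
        log.start(new File("."));  // header row: time,battery,encoder
        log.act();                 // data row: <elapsed ms>,12.6,1024
        log.stop();
    }
}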
|
Java | apache-2.0 | 01b7637e3a3afd283964230c5ad4ef60d960e7ae | 0 | signed/intellij-community,MER-GROUP/intellij-community,MER-GROUP/intellij-community,holmes/intellij-community,robovm/robovm-studio,Distrotech/intellij-community,diorcety/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,pwoodworth/intellij-community,alphafoobar/intellij-community,retomerz/intellij-community,michaelgallacher/intellij-community,suncycheng/intellij-community,semonte/intellij-community,ahb0327/intellij-community,vvv1559/intellij-community,semonte/intellij-community,allotria/intellij-community,retomerz/intellij-community,ryano144/intellij-community,semonte/intellij-community,jexp/idea2,ahb0327/intellij-community,diorcety/intellij-community,joewalnes/idea-community,retomerz/intellij-community,MichaelNedzelsky/intellij-community,lucafavatella/intellij-community,jagguli/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,orekyuu/intellij-community,hurricup/intellij-community,fnouama/intellij-community,muntasirsyed/intellij-community,fitermay/intellij-community,michaelgallacher/intellij-community,orekyuu/intellij-community,xfournet/intellij-community,ftomassetti/intellij-community,youdonghai/intellij-community,pwoodworth/intellij-community,petteyg/intellij-community,amith01994/intellij-community,ftomassetti/intellij-community,fnouama/intellij-community,samthor/intellij-community,diorcety/intellij-community,jagguli/intellij-community,kool79/intellij-community,slisson/intellij-community,samthor/intellij-community,Lekanich/intellij-community,adedayo/intellij-community,xfournet/intellij-community,Distrotech/intellij-community,holmes/intellij-community,hurricup/intellij-community,ryano144/intellij-community,muntasirsyed/intellij-community,lucafavatella/intellij-community,pwoodworth/intellij-community,muntasirsyed/intellij-community,orekyuu/intellij-community,slisson/intellij-community,izonder/intellij-community,blademainer/intellij-community,SerCeMan/intellij-community,vladmm/intellij-community,FHannes/intellij-community,robovm/robovm-studio,youdonghai/intellij-community,allotria/intellij-community,allotria/intellij-community,supersven/intellij-community,gnuhub/intellij-community,vladmm/intellij-community,kool79/intellij-community,slisson/intellij-community,kool79/intellij-community,wreckJ/intellij-community,clumsy/intellij-community,kdwink/intellij-community,Lekanich/intellij-community,akosyakov/intellij-community,retomerz/intellij-community,samthor/intellij-community,diorcety/intellij-community,robovm/robovm-studio,blademainer/intellij-community,signed/intellij-community,allotria/intellij-community,retomerz/intellij-community,nicolargo/intellij-community,nicolargo/intellij-community,Distrotech/intellij-community,allotria/intellij-community,youdonghai/intellij-community,wreckJ/intellij-community,kdwink/intellij-community,ibinti/intellij-community,clumsy/intellij-community,ibinti/intellij-community,nicolargo/intellij-community,Lekanich/intellij-community,slisson/intellij-community,ivan-fedorov/intellij-community,ryano144/intellij-community,muntasirsyed/intellij-community,izonder/intellij-community,kool79/intellij-community,apixandru/intellij-community,robovm/robovm-studio,michaelgallacher/intellij-community,da1z/intellij-community,dslomov/intellij-community,youdonghai/intellij-community,SerCeMan/intellij-community,alphafoobar/intellij-community,tmpgit/intellij-community,muntasirsyed/intellij-community,idea4bsd/idea4bsd,dslomov/intellij-community,orekyuu/intellij-community,di
orcety/intellij-community,hurricup/intellij-community,da1z/intellij-community,caot/intellij-community,signed/intellij-community,caot/intellij-community,da1z/intellij-community,ivan-fedorov/intellij-community,slisson/intellij-community,dslomov/intellij-community,MichaelNedzelsky/intellij-community,clumsy/intellij-community,michaelgallacher/intellij-community,ivan-fedorov/intellij-community,alphafoobar/intellij-community,jagguli/intellij-community,ahb0327/intellij-community,Lekanich/intellij-community,alphafoobar/intellij-community,suncycheng/intellij-community,vladmm/intellij-community,izonder/intellij-community,dslomov/intellij-community,ernestp/consulo,ryano144/intellij-community,hurricup/intellij-community,MER-GROUP/intellij-community,ibinti/intellij-community,TangHao1987/intellij-community,fengbaicanhe/intellij-community,fnouama/intellij-community,ryano144/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,fnouama/intellij-community,idea4bsd/idea4bsd,akosyakov/intellij-community,salguarnieri/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,caot/intellij-community,michaelgallacher/intellij-community,retomerz/intellij-community,ahb0327/intellij-community,ivan-fedorov/intellij-community,semonte/intellij-community,dslomov/intellij-community,lucafavatella/intellij-community,MER-GROUP/intellij-community,ryano144/intellij-community,joewalnes/idea-community,clumsy/intellij-community,pwoodworth/intellij-community,semonte/intellij-community,gnuhub/intellij-community,salguarnieri/intellij-community,joewalnes/idea-community,MER-GROUP/intellij-community,alphafoobar/intellij-community,samthor/intellij-community,ThiagoGarciaAlves/intellij-community,TangHao1987/intellij-community,blademainer/intellij-community,SerCeMan/intellij-community,semonte/intellij-community,holmes/intellij-community,supersven/intellij-community,pwoodworth/intellij-community,adedayo/intellij-community,jexp/idea2,supersven/intellij-community,amith01994/intellij-community,samthor/intellij-community,kool79/intellij-community,fengbaicanhe/intellij-community,ThiagoGarciaAlves/intellij-community,blademainer/intellij-community,slisson/intellij-community,alphafoobar/intellij-community,robovm/robovm-studio,MER-GROUP/intellij-community,ThiagoGarciaAlves/intellij-community,TangHao1987/intellij-community,ol-loginov/intellij-community,MichaelNedzelsky/intellij-community,ivan-fedorov/intellij-community,amith01994/intellij-community,pwoodworth/intellij-community,SerCeMan/intellij-community,apixandru/intellij-community,MER-GROUP/intellij-community,nicolargo/intellij-community,joewalnes/idea-community,salguarnieri/intellij-community,caot/intellij-community,slisson/intellij-community,idea4bsd/idea4bsd,MichaelNedzelsky/intellij-community,xfournet/intellij-community,da1z/intellij-community,ol-loginov/intellij-community,apixandru/intellij-community,amith01994/intellij-community,joewalnes/idea-community,salguarnieri/intellij-community,MichaelNedzelsky/intellij-community,SerCeMan/intellij-community,ibinti/intellij-community,apixandru/intellij-community,dslomov/intellij-community,ftomassetti/intellij-community,apixandru/intellij-community,supersven/intellij-community,signed/intellij-community,SerCeMan/intellij-community,supersven/intellij-community,lucafavatella/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,michaelgallacher/intellij-community,holmes/intellij-community,ivan-fedorov/intellij-community,kool79/intellij-community,asedunov/intellij-community,hu
rricup/intellij-community,lucafavatella/intellij-community,signed/intellij-community,jagguli/intellij-community,nicolargo/intellij-community,ibinti/intellij-community,supersven/intellij-community,ahb0327/intellij-community,suncycheng/intellij-community,adedayo/intellij-community,jagguli/intellij-community,ahb0327/intellij-community,slisson/intellij-community,retomerz/intellij-community,idea4bsd/idea4bsd,gnuhub/intellij-community,tmpgit/intellij-community,consulo/consulo,diorcety/intellij-community,jexp/idea2,xfournet/intellij-community,kdwink/intellij-community,samthor/intellij-community,izonder/intellij-community,ryano144/intellij-community,michaelgallacher/intellij-community,asedunov/intellij-community,TangHao1987/intellij-community,allotria/intellij-community,ivan-fedorov/intellij-community,TangHao1987/intellij-community,slisson/intellij-community,kdwink/intellij-community,pwoodworth/intellij-community,supersven/intellij-community,xfournet/intellij-community,kdwink/intellij-community,jagguli/intellij-community,asedunov/intellij-community,ernestp/consulo,ryano144/intellij-community,gnuhub/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,fengbaicanhe/intellij-community,kool79/intellij-community,orekyuu/intellij-community,akosyakov/intellij-community,jagguli/intellij-community,holmes/intellij-community,fitermay/intellij-community,apixandru/intellij-community,hurricup/intellij-community,da1z/intellij-community,petteyg/intellij-community,robovm/robovm-studio,youdonghai/intellij-community,MichaelNedzelsky/intellij-community,fengbaicanhe/intellij-community,semonte/intellij-community,samthor/intellij-community,MER-GROUP/intellij-community,joewalnes/idea-community,fengbaicanhe/intellij-community,fnouama/intellij-community,suncycheng/intellij-community,TangHao1987/intellij-community,adedayo/intellij-community,salguarnieri/intellij-community,clumsy/intellij-community,wreckJ/intellij-community,kdwink/intellij-community,robovm/robovm-studio,vladmm/intellij-community,fnouama/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,hurricup/intellij-community,orekyuu/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,tmpgit/intellij-community,nicolargo/intellij-community,vladmm/intellij-community,izonder/intellij-community,salguarnieri/intellij-community,allotria/intellij-community,fitermay/intellij-community,TangHao1987/intellij-community,ol-loginov/intellij-community,salguarnieri/intellij-community,fengbaicanhe/intellij-community,orekyuu/intellij-community,SerCeMan/intellij-community,wreckJ/intellij-community,petteyg/intellij-community,samthor/intellij-community,MichaelNedzelsky/intellij-community,suncycheng/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,ftomassetti/intellij-community,da1z/intellij-community,ftomassetti/intellij-community,michaelgallacher/intellij-community,fitermay/intellij-community,dslomov/intellij-community,ol-loginov/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,holmes/intellij-community,semonte/intellij-community,dslomov/intellij-community,vladmm/intellij-community,vvv1559/intellij-community,da1z/intellij-community,ol-loginov/intellij-community,muntasirsyed/intellij-community,ernestp/consulo,signed/intellij-community,MER-GROUP/intellij-community,ivan-fedorov/intellij-community,ernestp/consulo,dslomov/intellij-community,ol-loginov/intellij-community,pwoodworth/intellij-community,fitermay/intellij-community,d
iorcety/intellij-community,gnuhub/intellij-community,robovm/robovm-studio,vladmm/intellij-community,ibinti/intellij-community,ibinti/intellij-community,SerCeMan/intellij-community,ftomassetti/intellij-community,retomerz/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,izonder/intellij-community,ftomassetti/intellij-community,fengbaicanhe/intellij-community,fnouama/intellij-community,ftomassetti/intellij-community,ivan-fedorov/intellij-community,jexp/idea2,nicolargo/intellij-community,jexp/idea2,youdonghai/intellij-community,diorcety/intellij-community,akosyakov/intellij-community,FHannes/intellij-community,kool79/intellij-community,clumsy/intellij-community,wreckJ/intellij-community,kdwink/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,alphafoobar/intellij-community,idea4bsd/idea4bsd,holmes/intellij-community,gnuhub/intellij-community,signed/intellij-community,izonder/intellij-community,hurricup/intellij-community,ahb0327/intellij-community,adedayo/intellij-community,blademainer/intellij-community,petteyg/intellij-community,semonte/intellij-community,FHannes/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,Distrotech/intellij-community,petteyg/intellij-community,signed/intellij-community,caot/intellij-community,wreckJ/intellij-community,Lekanich/intellij-community,mglukhikh/intellij-community,MichaelNedzelsky/intellij-community,amith01994/intellij-community,amith01994/intellij-community,fengbaicanhe/intellij-community,hurricup/intellij-community,orekyuu/intellij-community,alphafoobar/intellij-community,MichaelNedzelsky/intellij-community,ftomassetti/intellij-community,orekyuu/intellij-community,holmes/intellij-community,xfournet/intellij-community,youdonghai/intellij-community,clumsy/intellij-community,hurricup/intellij-community,ivan-fedorov/intellij-community,Lekanich/intellij-community,ahb0327/intellij-community,SerCeMan/intellij-community,blademainer/intellij-community,vladmm/intellij-community,hurricup/intellij-community,akosyakov/intellij-community,Lekanich/intellij-community,amith01994/intellij-community,pwoodworth/intellij-community,petteyg/intellij-community,lucafavatella/intellij-community,youdonghai/intellij-community,ivan-fedorov/intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,Distrotech/intellij-community,gnuhub/intellij-community,clumsy/intellij-community,petteyg/intellij-community,joewalnes/idea-community,petteyg/intellij-community,SerCeMan/intellij-community,jexp/idea2,ryano144/intellij-community,holmes/intellij-community,vvv1559/intellij-community,amith01994/intellij-community,asedunov/intellij-community,tmpgit/intellij-community,izonder/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,consulo/consulo,fnouama/intellij-community,diorcety/intellij-community,tmpgit/intellij-community,semonte/intellij-community,fitermay/intellij-community,tmpgit/intellij-community,idea4bsd/idea4bsd,nicolargo/intellij-community,izonder/intellij-community,akosyakov/intellij-community,jexp/idea2,kool79/intellij-community,ol-loginov/intellij-community,apixandru/intellij-community,FHannes/intellij-community,adedayo/intellij-community,allotria/intellij-community,ol-loginov/intellij-community,ol-loginov/intellij-community,xfournet/intellij-community,FHannes/intellij-community,FHannes/intellij-community,da1z/intellij-community,TangHao1987/intellij-community,fengbaicanhe/intellij-community,consulo/consulo,Distrotech/intellij-comm
unity,muntasirsyed/intellij-community,ibinti/intellij-community,jagguli/intellij-community,ryano144/intellij-community,Distrotech/intellij-community,lucafavatella/intellij-community,michaelgallacher/intellij-community,muntasirsyed/intellij-community,vvv1559/intellij-community,akosyakov/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,samthor/intellij-community,vladmm/intellij-community,fengbaicanhe/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,supersven/intellij-community,muntasirsyed/intellij-community,ahb0327/intellij-community,holmes/intellij-community,wreckJ/intellij-community,wreckJ/intellij-community,kdwink/intellij-community,mglukhikh/intellij-community,ftomassetti/intellij-community,jagguli/intellij-community,fitermay/intellij-community,blademainer/intellij-community,ol-loginov/intellij-community,salguarnieri/intellij-community,caot/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,samthor/intellij-community,amith01994/intellij-community,kdwink/intellij-community,apixandru/intellij-community,holmes/intellij-community,ernestp/consulo,caot/intellij-community,robovm/robovm-studio,joewalnes/idea-community,MER-GROUP/intellij-community,robovm/robovm-studio,nicolargo/intellij-community,MER-GROUP/intellij-community,diorcety/intellij-community,orekyuu/intellij-community,ivan-fedorov/intellij-community,fnouama/intellij-community,joewalnes/idea-community,FHannes/intellij-community,idea4bsd/idea4bsd,retomerz/intellij-community,ol-loginov/intellij-community,izonder/intellij-community,gnuhub/intellij-community,suncycheng/intellij-community,amith01994/intellij-community,da1z/intellij-community,tmpgit/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,allotria/intellij-community,MichaelNedzelsky/intellij-community,ftomassetti/intellij-community,robovm/robovm-studio,FHannes/intellij-community,wreckJ/intellij-community,holmes/intellij-community,apixandru/intellij-community,tmpgit/intellij-community,adedayo/intellij-community,youdonghai/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,consulo/consulo,tmpgit/intellij-community,lucafavatella/intellij-community,salguarnieri/intellij-community,nicolargo/intellij-community,alphafoobar/intellij-community,adedayo/intellij-community,signed/intellij-community,Distrotech/intellij-community,blademainer/intellij-community,tmpgit/intellij-community,akosyakov/intellij-community,Lekanich/intellij-community,TangHao1987/intellij-community,jagguli/intellij-community,da1z/intellij-community,ahb0327/intellij-community,supersven/intellij-community,Lekanich/intellij-community,slisson/intellij-community,supersven/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,blademainer/intellij-community,blademainer/intellij-community,salguarnieri/intellij-community,vvv1559/intellij-community,vladmm/intellij-community,MER-GROUP/intellij-community,salguarnieri/intellij-community,samthor/intellij-community,lucafavatella/intellij-community,diorcety/intellij-community,mglukhikh/intellij-community,petteyg/intellij-community,ahb0327/intellij-community,tmpgit/intellij-community,ryano144/intellij-community,Distrotech/intellij-community,supersven/intellij-community,ol-loginov/intellij-community,pwoodworth/intellij-community,fengbaicanhe/intellij-community,vladmm/intellij-community,fnouama/intellij-community,ernestp/consulo,MichaelNedzelsky/intellij-community,MichaelNedzelsky/intellij-community,ryano144/intellij-
community,retomerz/intellij-community,TangHao1987/intellij-community,fnouama/intellij-community,michaelgallacher/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,allotria/intellij-community,petteyg/intellij-community,nicolargo/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,akosyakov/intellij-community,fengbaicanhe/intellij-community,clumsy/intellij-community,Lekanich/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,signed/intellij-community,retomerz/intellij-community,akosyakov/intellij-community,ThiagoGarciaAlves/intellij-community,izonder/intellij-community,dslomov/intellij-community,lucafavatella/intellij-community,ahb0327/intellij-community,suncycheng/intellij-community,hurricup/intellij-community,amith01994/intellij-community,kool79/intellij-community,mglukhikh/intellij-community,kdwink/intellij-community,ftomassetti/intellij-community,signed/intellij-community,FHannes/intellij-community,caot/intellij-community,ibinti/intellij-community,adedayo/intellij-community,wreckJ/intellij-community,alphafoobar/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,SerCeMan/intellij-community,FHannes/intellij-community,ibinti/intellij-community,adedayo/intellij-community,caot/intellij-community,xfournet/intellij-community,signed/intellij-community,fitermay/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,signed/intellij-community,fitermay/intellij-community,mglukhikh/intellij-community,kool79/intellij-community,samthor/intellij-community,gnuhub/intellij-community,kdwink/intellij-community,slisson/intellij-community,nicolargo/intellij-community,caot/intellij-community,consulo/consulo,pwoodworth/intellij-community,fitermay/intellij-community,orekyuu/intellij-community,Lekanich/intellij-community,amith01994/intellij-community,wreckJ/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,Distrotech/intellij-community,FHannes/intellij-community,petteyg/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,izonder/intellij-community,caot/intellij-community,idea4bsd/idea4bsd,SerCeMan/intellij-community,retomerz/intellij-community,apixandru/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,TangHao1987/intellij-community,suncycheng/intellij-community,clumsy/intellij-community,caot/intellij-community,gnuhub/intellij-community,pwoodworth/intellij-community,jagguli/intellij-community,hurricup/intellij-community,clumsy/intellij-community,da1z/intellij-community,vvv1559/intellij-community,michaelgallacher/intellij-community,fnouama/intellij-community,xfournet/intellij-community,consulo/consulo,akosyakov/intellij-community,apixandru/intellij-community,fitermay/intellij-community,kool79/intellij-community,lucafavatella/intellij-community,muntasirsyed/intellij-community,akosyakov/intellij-community,jexp/idea2,muntasirsyed/intellij-community,asedunov/intellij-community,alphafoobar/intellij-community,supersven/intellij-community,Distrotech/intellij-community,Lekanich/intellij-community,gnuhub/intellij-community,salguarnieri/intellij-community,clumsy/intellij-community,vladmm/intellij-community,xfournet/intellij-community,slisson/intellij-community,vvv1559/intellij-community,dslomov/intellij-community,asedunov/intellij-community,alphafoobar/intellij-community,wreckJ/intellij-community,vvv1559/intellij-community,muntasirsyed/intellij-community,Distrotech/intellij-community,vvv1559/intellij-community,idea4bsd/id
ea4bsd,asedunov/intellij-community,orekyuu/intellij-community,diorcety/intellij-community,robovm/robovm-studio,allotria/intellij-community,dslomov/intellij-community,ibinti/intellij-community,adedayo/intellij-community,adedayo/intellij-community,retomerz/intellij-community,petteyg/intellij-community,michaelgallacher/intellij-community,gnuhub/intellij-community,apixandru/intellij-community,fitermay/intellij-community,FHannes/intellij-community | /*
* Copyright (c) 2000-2006 JetBrains s.r.o. All Rights Reserved.
*/
package com.intellij.codeInspection.ui.actions;
import com.intellij.codeEditor.printing.ExportToHTMLSettings;
import com.intellij.codeInspection.InspectionProfile;
import com.intellij.codeInspection.InspectionsBundle;
import com.intellij.codeInspection.ex.GlobalInspectionContextImpl;
import com.intellij.codeInspection.ex.InspectionApplication;
import com.intellij.codeInspection.ex.InspectionTool;
import com.intellij.codeInspection.export.ExportToHTMLDialog;
import com.intellij.codeInspection.export.HTMLExportFrameMaker;
import com.intellij.codeInspection.export.HTMLExporter;
import com.intellij.codeInspection.reference.RefEntity;
import com.intellij.codeInspection.reference.RefImplicitConstructor;
import com.intellij.codeInspection.reference.RefModule;
import com.intellij.codeInspection.ui.InspectionGroupNode;
import com.intellij.codeInspection.ui.InspectionNode;
import com.intellij.codeInspection.ui.InspectionResultsView;
import com.intellij.codeInspection.ui.InspectionTreeNode;
import com.intellij.codeInspection.util.RefEntityAlphabeticalComparator;
import com.intellij.ide.BrowserUtil;
import com.intellij.ide.highlighter.XmlFileType;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.components.PathMacroManager;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.ui.popup.ListPopup;
import com.intellij.openapi.ui.popup.PopupStep;
import com.intellij.openapi.ui.popup.util.BaseListPopupStep;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.IconLoader;
import com.intellij.openapi.util.JDOMUtil;
import com.intellij.openapi.util.Pair;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.util.ui.tree.TreeUtil;
import org.jdom.Document;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import javax.swing.*;
import java.io.File;
import java.io.IOException;
import java.util.*;
/**
* User: anna
* Date: 11-Jan-2006
*/
public class ExportHTMLAction extends AnAction {
private InspectionResultsView myView;
@NonNls private static final String PROBLEMS = "problems";
@NonNls private static final String HTML = "HTML";
@NonNls private static final String XML = "XML";
public ExportHTMLAction(final InspectionResultsView view) {
super(InspectionsBundle.message("inspection.action.export.html"), null, IconLoader.getIcon("/actions/export.png"));
myView = view;
}
public void actionPerformed(AnActionEvent e) {
final ListPopup popup = JBPopupFactory.getInstance().createListPopup(
new BaseListPopupStep<String>(InspectionsBundle.message("inspection.action.export.popup.title"), new String[]{HTML, XML}) {
public PopupStep onChosen(final String selectedValue, final boolean finalChoice) {
exportHTML(Comparing.strEqual(selectedValue, HTML));
return PopupStep.FINAL_CHOICE;
}
});
InspectionResultsView.showPopup(e, popup);
}
private void exportHTML(final boolean exportToHTML) {
ExportToHTMLDialog exportToHTMLDialog = new ExportToHTMLDialog(myView.getProject(), exportToHTML);
final ExportToHTMLSettings exportToHTMLSettings = ExportToHTMLSettings.getInstance(myView.getProject());
if (exportToHTMLSettings.OUTPUT_DIRECTORY == null) {
exportToHTMLSettings.OUTPUT_DIRECTORY = PathManager.getHomePath() + File.separator + "inspections";
}
exportToHTMLDialog.reset();
exportToHTMLDialog.show();
if (!exportToHTMLDialog.isOK()) {
return;
}
exportToHTMLDialog.apply();
final String outputDirectoryName = exportToHTMLSettings.OUTPUT_DIRECTORY;
ApplicationManager.getApplication().invokeLater(new Runnable() {
public void run() {
final Runnable exportRunnable = new Runnable() {
public void run() {
if (!exportToHTML) {
dupm2XML(outputDirectoryName);
} else {
HTMLExportFrameMaker maker = new HTMLExportFrameMaker(outputDirectoryName, myView.getProject());
maker.start();
try {
exportHTML(maker);
}
catch (ProcessCanceledException e) {
// Do nothing here.
}
maker.done();
}
}
};
if (!ProgressManager.getInstance().runProcessWithProgressSynchronously(exportRunnable,
exportToHTML ? InspectionsBundle.message("inspection.generating.html.progress.title")
: InspectionsBundle.message("inspection.generating.xml.progress.title"), true, myView.getProject())) {
return;
}
if (exportToHTML && exportToHTMLSettings.OPEN_IN_BROWSER) {
BrowserUtil.launchBrowser(exportToHTMLSettings.OUTPUT_DIRECTORY + File.separator + "index.html");
}
}
});
}
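	// Dumps one <shortName>.xml per inspection tool plus a descriptions file recording the
	// profile name ("dupm" is the source's own typo for "dump").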
private void dupm2XML(final String outputDirectoryName) {
try {
new File(outputDirectoryName).mkdirs();
final InspectionTreeNode root = myView.getTree().getRoot();
final IOException[] ex = new IOException[1];
TreeUtil.traverse(root, new TreeUtil.Traverse() {
public boolean accept(final Object node) {
if (node instanceof InspectionNode) {
InspectionNode toolNode = (InspectionNode)node;
Element problems = new Element(PROBLEMS);
final InspectionTool tool = toolNode.getTool();
final Set<InspectionTool> tools = getWorkedTools(toolNode);
for (InspectionTool inspectionTool : tools) {
inspectionTool.exportResults(problems);
}
PathMacroManager.getInstance(myView.getProject()).collapsePaths(problems);
try {
JDOMUtil.writeDocument(new Document(problems),
outputDirectoryName + File.separator + tool.getShortName() + XmlFileType.DOT_DEFAULT_EXTENSION,
CodeStyleSettingsManager.getSettings(null).getLineSeparator());
}
catch (IOException e) {
ex[0] = e;
}
}
return true;
}
});
if (ex[0] != null) {
throw ex[0];
}
final Element element = new Element(InspectionApplication.INSPECTIONS_NODE);
final String profileName = myView.getCurrentProfileName();
if (profileName != null) {
element.setAttribute(InspectionApplication.PROFILE, profileName);
}
JDOMUtil.writeDocument(new Document(element),
outputDirectoryName + File.separator + InspectionApplication.DESCRIPTIONS + XmlFileType.DOT_DEFAULT_EXTENSION,
CodeStyleSettingsManager.getSettings(null).getLineSeparator());
}
catch (final IOException e) {
SwingUtilities.invokeLater(new Runnable() {
public void run() {
Messages.showErrorDialog(myView, e.getMessage());
}
});
}
}
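	// When a single profile is selected, only the node's own tool is exported; otherwise every
	// tool instance registered under the same short name (one per profile) is collected.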
private Set<InspectionTool> getWorkedTools(InspectionNode node) {
final Set<InspectionTool> result = new HashSet<InspectionTool>();
final InspectionTool tool = node.getTool();
if (myView.getCurrentProfileName() != null){
result.add(tool);
return result;
}
final String shortName = tool.getShortName();
final GlobalInspectionContextImpl context = myView.getGlobalInspectionContext();
final Set<Pair<InspectionTool,InspectionProfile>> tools = context.getTools().get(shortName);
if (tools != null) { //dummy entry points tool
for (Pair<InspectionTool, InspectionProfile> pair : tools) {
result.add(pair.first);
}
}
return result;
}
private void exportHTML(HTMLExportFrameMaker frameMaker) {
final InspectionTreeNode root = myView.getTree().getRoot();
final Enumeration children = root.children();
while (children.hasMoreElements()) {
InspectionTreeNode node = (InspectionTreeNode)children.nextElement();
if (node instanceof InspectionNode) {
exportHTML(frameMaker, (InspectionNode)node);
}
else if (node instanceof InspectionGroupNode) {
final Enumeration groupChildren = node.children();
while (groupChildren.hasMoreElements()) {
InspectionNode toolNode = (InspectionNode)groupChildren.nextElement();
exportHTML(frameMaker, toolNode);
}
}
}
}
private void exportHTML(HTMLExportFrameMaker frameMaker, InspectionNode node) {
Set<InspectionTool> tools = getWorkedTools(node);
final InspectionTool tool = node.getTool();
HTMLExporter exporter =
new HTMLExporter(frameMaker.getRootFolder() + "/" + tool.getShortName(), tool.getComposer(), myView.getProject());
frameMaker.startInspection(tool);
exportHTML(tools, exporter);
exporter.generateReferencedPages();
}
@SuppressWarnings({"HardCodedStringLiteral"})
private void exportHTML(Set<InspectionTool> tools, HTMLExporter exporter) {
StringBuffer packageIndex = new StringBuffer();
packageIndex.append("<html><body>");
final Map<String, Set<RefEntity>> content = new HashMap<String, Set<RefEntity>>();
for (InspectionTool tool : tools) {
content.putAll(tool.getPackageContent());
}
final Set<RefEntity> defaultPackageEntities = content.remove(null);
if (defaultPackageEntities != null) {
content.put("default package" , defaultPackageEntities);
}
ArrayList<String> packageNames = new ArrayList<String>(content.keySet());
Collections.sort(packageNames);
for (String packageName : packageNames) {
appendPackageReference(packageIndex, packageName);
final ArrayList<RefEntity> packageContent = new ArrayList<RefEntity>(content.get(packageName));
Collections.sort(packageContent, RefEntityAlphabeticalComparator.getInstance());
StringBuffer contentIndex = new StringBuffer();
contentIndex.append("<html><body>");
for (RefEntity refElement : packageContent) {
if (refElement instanceof RefImplicitConstructor) {
refElement = ((RefImplicitConstructor)refElement).getOwnerClass();
}
contentIndex.append("<a HREF=\"");
contentIndex.append(exporter.getURL(refElement));
contentIndex.append("\" target=\"elementFrame\">");
contentIndex.append(refElement.getName());
contentIndex.append("</a><br>");
exporter.createPage(refElement);
}
contentIndex.append("</body></html>");
HTMLExporter.writeFile(exporter.getRootFolder(), packageName + "-index.html", contentIndex, myView.getProject());
}
final Set<RefModule> modules = new HashSet<RefModule>();
for (InspectionTool tool : tools) {
final Set<RefModule> problems = tool.getModuleProblems();
if (problems != null) {
modules.addAll(problems);
}
}
final List<RefModule> sortedModules = new ArrayList<RefModule>(modules);
Collections.sort(sortedModules, RefEntityAlphabeticalComparator.getInstance());
for (RefModule module : sortedModules) {
appendPackageReference(packageIndex, module.getName());
StringBuffer contentIndex = new StringBuffer();
contentIndex.append("<html><body>");
contentIndex.append("<a HREF=\"");
contentIndex.append(exporter.getURL(module));
contentIndex.append("\" target=\"elementFrame\">");
contentIndex.append(module.getName());
contentIndex.append("</a><br>");
exporter.createPage(module);
contentIndex.append("</body></html>");
HTMLExporter.writeFile(exporter.getRootFolder(), module.getName() + "-index.html", contentIndex, myView.getProject());
}
packageIndex.append("</body></html>");
HTMLExporter.writeFile(exporter.getRootFolder(), "index.html", packageIndex, myView.getProject());
}
@SuppressWarnings({"HardCodedStringLiteral"})
private static void appendPackageReference(StringBuffer packageIndex, String packageName) {
packageIndex.append("<a HREF=\"");
packageIndex.append(packageName);
packageIndex.append("-index.html\" target=\"packageFrame\">");
packageIndex.append(packageName);
packageIndex.append("</a><br>");
}
}
| inspections/impl/com/intellij/codeInspection/ui/actions/ExportHTMLAction.java | /*
* Copyright (c) 2000-2006 JetBrains s.r.o. All Rights Reserved.
*/
package com.intellij.codeInspection.ui.actions;
import com.intellij.codeEditor.printing.ExportToHTMLSettings;
import com.intellij.codeInspection.InspectionProfile;
import com.intellij.codeInspection.InspectionsBundle;
import com.intellij.codeInspection.ex.GlobalInspectionContextImpl;
import com.intellij.codeInspection.ex.InspectionApplication;
import com.intellij.codeInspection.ex.InspectionTool;
import com.intellij.codeInspection.export.ExportToHTMLDialog;
import com.intellij.codeInspection.export.HTMLExportFrameMaker;
import com.intellij.codeInspection.export.HTMLExporter;
import com.intellij.codeInspection.reference.RefEntity;
import com.intellij.codeInspection.reference.RefImplicitConstructor;
import com.intellij.codeInspection.reference.RefModule;
import com.intellij.codeInspection.ui.InspectionGroupNode;
import com.intellij.codeInspection.ui.InspectionNode;
import com.intellij.codeInspection.ui.InspectionResultsView;
import com.intellij.codeInspection.ui.InspectionTreeNode;
import com.intellij.codeInspection.util.RefEntityAlphabeticalComparator;
import com.intellij.ide.BrowserUtil;
import com.intellij.ide.highlighter.XmlFileType;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.components.PathMacroManager;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.ui.popup.ListPopup;
import com.intellij.openapi.ui.popup.PopupStep;
import com.intellij.openapi.ui.popup.util.BaseListPopupStep;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.IconLoader;
import com.intellij.openapi.util.JDOMUtil;
import com.intellij.openapi.util.Pair;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.util.ui.tree.TreeUtil;
import org.jdom.Document;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import javax.swing.*;
import java.io.File;
import java.io.IOException;
import java.util.*;
/**
* User: anna
* Date: 11-Jan-2006
*/
public class ExportHTMLAction extends AnAction {
private InspectionResultsView myView;
@NonNls private static final String PROBLEMS = "problems";
@NonNls private static final String HTML = "HTML";
@NonNls private static final String XML = "XML";
public ExportHTMLAction(final InspectionResultsView view) {
super(InspectionsBundle.message("inspection.action.export.html"), null, IconLoader.getIcon("/actions/export.png"));
myView = view;
}
public void actionPerformed(AnActionEvent e) {
final ListPopup popup = JBPopupFactory.getInstance().createListPopup(
new BaseListPopupStep<String>(InspectionsBundle.message("inspection.action.export.popup.title"), new String[]{HTML, XML}) {
public PopupStep onChosen(final String selectedValue, final boolean finalChoice) {
exportHTML(Comparing.strEqual(selectedValue, HTML));
return PopupStep.FINAL_CHOICE;
}
});
InspectionResultsView.showPopup(e, popup);
}
private void exportHTML(final boolean exportToHTML) {
ExportToHTMLDialog exportToHTMLDialog = new ExportToHTMLDialog(myView.getProject(), exportToHTML);
final ExportToHTMLSettings exportToHTMLSettings = ExportToHTMLSettings.getInstance(myView.getProject());
if (exportToHTMLSettings.OUTPUT_DIRECTORY == null) {
exportToHTMLSettings.OUTPUT_DIRECTORY = PathManager.getHomePath() + File.separator + "inspections";
}
exportToHTMLDialog.reset();
exportToHTMLDialog.show();
if (!exportToHTMLDialog.isOK()) {
return;
}
exportToHTMLDialog.apply();
final String outputDirectoryName = exportToHTMLSettings.OUTPUT_DIRECTORY;
ApplicationManager.getApplication().invokeLater(new Runnable() {
public void run() {
final Runnable exportRunnable = new Runnable() {
public void run() {
if (!exportToHTML) {
dupm2XML(outputDirectoryName);
} else {
HTMLExportFrameMaker maker = new HTMLExportFrameMaker(outputDirectoryName, myView.getProject());
maker.start();
try {
exportHTML(maker);
}
catch (ProcessCanceledException e) {
// Do nothing here.
}
maker.done();
}
}
};
if (!ProgressManager.getInstance().runProcessWithProgressSynchronously(exportRunnable,
exportToHTML ? InspectionsBundle.message("inspection.generating.html.progress.title")
: InspectionsBundle.message("inspection.generating.xml.progress.title"), true, myView.getProject())) {
return;
}
if (exportToHTML && exportToHTMLSettings.OPEN_IN_BROWSER) {
BrowserUtil.launchBrowser(exportToHTMLSettings.OUTPUT_DIRECTORY + File.separator + "index.html");
}
}
});
}
private void dupm2XML(final String outputDirectoryName) {
try {
new File(outputDirectoryName).mkdirs();
final InspectionTreeNode root = myView.getTree().getRoot();
final IOException[] ex = new IOException[1];
TreeUtil.traverse(root, new TreeUtil.Traverse() {
public boolean accept(final Object node) {
if (node instanceof InspectionNode) {
InspectionNode toolNode = (InspectionNode)node;
Element problems = new Element(PROBLEMS);
final InspectionTool tool = toolNode.getTool();
final Set<InspectionTool> tools = getWorkedTools(toolNode);
for (InspectionTool inspectionTool : tools) {
inspectionTool.exportResults(problems);
}
PathMacroManager.getInstance(myView.getProject()).collapsePaths(problems);
try {
JDOMUtil.writeDocument(new Document(problems),
outputDirectoryName + File.separator + tool.getShortName() + XmlFileType.DOT_DEFAULT_EXTENSION,
CodeStyleSettingsManager.getSettings(null).getLineSeparator());
}
catch (IOException e) {
ex[0] = e;
}
}
return true;
}
});
if (ex[0] != null) {
throw ex[0];
}
final Element element = new Element(InspectionApplication.INSPECTIONS_NODE);
final String profileName = myView.getCurrentProfileName();
if (profileName != null) {
element.setAttribute(InspectionApplication.PROFILE, profileName);
}
JDOMUtil.writeDocument(new Document(element),
outputDirectoryName + File.separator + InspectionApplication.DESCRIPTIONS + XmlFileType.DOT_DEFAULT_EXTENSION,
CodeStyleSettingsManager.getSettings(null).getLineSeparator());
}
catch (final IOException e) {
SwingUtilities.invokeLater(new Runnable() {
public void run() {
Messages.showErrorDialog(myView, e.getMessage());
}
});
}
}
private Set<InspectionTool> getWorkedTools(InspectionNode node) {
final Set<InspectionTool> result = new HashSet<InspectionTool>();
final InspectionTool tool = node.getTool();
if (myView.getCurrentProfileName() != null){
result.add(tool);
return result;
}
final String shortName = tool.getShortName();
final GlobalInspectionContextImpl context = myView.getGlobalInspectionContext();
final Set<Pair<InspectionTool,InspectionProfile>> tools = context.getTools().get(shortName);
if (tools != null) { //dummy entry points tool
for (Pair<InspectionTool, InspectionProfile> pair : tools) {
result.add(pair.first);
}
}
return result;
}
private void exportHTML(HTMLExportFrameMaker frameMaker) {
final InspectionTreeNode root = myView.getTree().getRoot();
final Enumeration children = root.children();
while (children.hasMoreElements()) {
InspectionTreeNode node = (InspectionTreeNode)children.nextElement();
if (node instanceof InspectionNode) {
exportHTML(frameMaker, (InspectionNode)node);
}
else if (node instanceof InspectionGroupNode) {
final Enumeration groupChildren = node.children();
while (groupChildren.hasMoreElements()) {
InspectionNode toolNode = (InspectionNode)groupChildren.nextElement();
exportHTML(frameMaker, toolNode);
}
}
}
}
private void exportHTML(HTMLExportFrameMaker frameMaker, InspectionNode node) {
Set<InspectionTool> tools = getWorkedTools(node);
final InspectionTool tool = node.getTool();
HTMLExporter exporter =
new HTMLExporter(frameMaker.getRootFolder() + "/" + tool.getShortName(), tool.getComposer(), myView.getProject());
frameMaker.startInspection(tool);
exportHTML(tools, exporter);
exporter.generateReferencedPages();
}
@SuppressWarnings({"HardCodedStringLiteral"})
private void exportHTML(Set<InspectionTool> tools, HTMLExporter exporter) {
StringBuffer packageIndex = new StringBuffer();
packageIndex.append("<html><body>");
final Map<String, Set<RefEntity>> content = new HashMap<String, Set<RefEntity>>();
for (InspectionTool tool : tools) {
content.putAll(tool.getPackageContent());
}
ArrayList<String> packageNames = new ArrayList<String>(content.keySet());
Collections.sort(packageNames, RefEntityAlphabeticalComparator.getInstance());
for (String packageName : packageNames) {
appendPackageReference(packageIndex, packageName);
final ArrayList<RefEntity> packageContent = new ArrayList<RefEntity>(content.get(packageName));
Collections.sort(packageContent, RefEntityAlphabeticalComparator.getInstance());
StringBuffer contentIndex = new StringBuffer();
contentIndex.append("<html><body>");
for (RefEntity refElement : packageContent) {
if (refElement instanceof RefImplicitConstructor) {
refElement = ((RefImplicitConstructor)refElement).getOwnerClass();
}
contentIndex.append("<a HREF=\"");
contentIndex.append(exporter.getURL(refElement));
contentIndex.append("\" target=\"elementFrame\">");
contentIndex.append(refElement.getName());
contentIndex.append("</a><br>");
exporter.createPage(refElement);
}
contentIndex.append("</body></html>");
HTMLExporter.writeFile(exporter.getRootFolder(), packageName + "-index.html", contentIndex, myView.getProject());
}
final Set<RefModule> modules = new HashSet<RefModule>();
for (InspectionTool tool : tools) {
final Set<RefModule> problems = tool.getModuleProblems();
if (problems != null) {
modules.addAll(problems);
}
}
final List<RefModule> sortedModules = new ArrayList<RefModule>(modules);
Collections.sort(sortedModules, RefEntityAlphabeticalComparator.getInstance());
for (RefModule module : sortedModules) {
appendPackageReference(packageIndex, module.getName());
StringBuffer contentIndex = new StringBuffer();
contentIndex.append("<html><body>");
contentIndex.append("<a HREF=\"");
contentIndex.append(exporter.getURL(module));
contentIndex.append("\" target=\"elementFrame\">");
contentIndex.append(module.getName());
contentIndex.append("</a><br>");
exporter.createPage(module);
contentIndex.append("</body></html>");
HTMLExporter.writeFile(exporter.getRootFolder(), module.getName() + "-index.html", contentIndex, myView.getProject());
}
packageIndex.append("</body></html>");
HTMLExporter.writeFile(exporter.getRootFolder(), "index.html", packageIndex, myView.getProject());
}
@SuppressWarnings({"HardCodedStringLiteral"})
private static void appendPackageReference(StringBuffer packageIndex, String packageName) {
packageIndex.append("<a HREF=\"");
packageIndex.append(packageName);
packageIndex.append("-index.html\" target=\"packageFrame\">");
packageIndex.append(packageName);
packageIndex.append("</a><br>");
}
}
| export to HTML - default package support (IDEADEV-3277)
| inspections/impl/com/intellij/codeInspection/ui/actions/ExportHTMLAction.java | export to HTML - default package support (IDEADEV-3277) | <ide><path>nspections/impl/com/intellij/codeInspection/ui/actions/ExportHTMLAction.java
<ide> content.putAll(tool.getPackageContent());
<ide> }
<ide>
<add> final Set<RefEntity> defaultPackageEntities = content.remove(null);
<add> if (defaultPackageEntities != null) {
<add> content.put("default package" , defaultPackageEntities);
<add> }
<add>
<ide> ArrayList<String> packageNames = new ArrayList<String>(content.keySet());
<ide>
<del> Collections.sort(packageNames, RefEntityAlphabeticalComparator.getInstance());
<add> Collections.sort(packageNames);
<ide> for (String packageName : packageNames) {
<ide> appendPackageReference(packageIndex, packageName);
<ide> final ArrayList<RefEntity> packageContent = new ArrayList<RefEntity>(content.get(packageName)); |
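The added lines give entities from the default (unnamed) package, which the tools report under a null map key, a printable "default package" bucket before the index is built; once only plain string keys remain, the custom comparator can be dropped in favor of natural ordering. A self-contained sketch of that null-key remapping (the map contents are illustrative):

import java.util.*;

public class DefaultPackageDemo {
    public static void main(String[] args) {
        Map<String, Set<String>> content = new HashMap<String, Set<String>>();
        content.put("com.example", new HashSet<String>(Arrays.asList("Foo")));
        content.put(null, new HashSet<String>(Arrays.asList("Bar"))); // default-package entities
        // Same pattern as the patch: pull the null bucket out and re-file it under a printable name.
        Set<String> defaultPackage = content.remove(null);
        if (defaultPackage != null) {
            content.put("default package", defaultPackage);
        }
        List<String> names = new ArrayList<String>(content.keySet());
        Collections.sort(names); // natural String order; no null keys remain
        System.out.println(names); // [com.example, default package]
    }
}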
|
Java | apache-2.0 | bbd16331b43bbc8f303694f81588b1b3b1e431ed | 0 | sylvanaar/IDLua,sylvanaar/IDLua,consulo/consulo-lua,consulo/consulo-lua,consulo/consulo-lua,sylvanaar/IDLua | /*
* Copyright 2010 Jon S Akhtar (Sylvanaar)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sylvanaar.idea.Lua.findUsages;
import com.intellij.lang.cacheBuilder.DefaultWordsScanner;
import com.intellij.lang.cacheBuilder.WordsScanner;
import com.intellij.lang.findUsages.FindUsagesProvider;
import com.intellij.psi.PsiElement;
import com.sylvanaar.idea.Lua.lang.lexer.LuaLexer;
import com.sylvanaar.idea.Lua.lang.lexer.LuaTokenTypes;
import com.sylvanaar.idea.Lua.lang.psi.LuaNamedElement;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* @author ven
*/
public class LuaFindUsagesProvider implements FindUsagesProvider {
@NotNull private static final DefaultWordsScanner DEFAULT_WORDS_SCANNER =
new DefaultWordsScanner(new LuaLexer(),
LuaTokenTypes.IDENTIFIERS_SET, LuaTokenTypes.COMMENT_SET, LuaTokenTypes.LITERALS_SET) {{
setMayHaveFileRefsInLiterals(true);
}};
@NotNull
public WordsScanner getWordsScanner() {
return DEFAULT_WORDS_SCANNER;
}
public boolean canFindUsagesFor(@NotNull final PsiElement psiElement) {
return psiElement instanceof LuaNamedElement;
}
@Nullable
public String getHelpId(@NotNull final PsiElement psiElement) {
return null;
}
@NotNull
public String getType(@NotNull final PsiElement element) {
return "identifier";
}
@NotNull
public String getDescriptiveName(@NotNull final PsiElement element) {
return getName(element);
}
@NotNull
public String getNodeText(@NotNull final PsiElement element, final boolean useFullName) {
final StringBuilder sb = new StringBuilder(getType(element));
if (sb.length() > 0) {
sb.append(" ");
}
sb.append(getName(element));
return sb.toString();
}
@NotNull
private String getName(@NotNull final PsiElement element) {
if (element instanceof LuaNamedElement) {
return ((LuaNamedElement) element).getName();
}
return "";
}
} | src/findUsages/LuaFindUsagesProvider.java | /*
* Copyright 2010 Jon S Akhtar (Sylvanaar)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sylvanaar.idea.Lua.findUsages;
import com.intellij.lang.cacheBuilder.DefaultWordsScanner;
import com.intellij.lang.cacheBuilder.WordsScanner;
import com.intellij.lang.findUsages.FindUsagesProvider;
import com.intellij.psi.PsiElement;
import com.sylvanaar.idea.Lua.lang.lexer.LuaFlexLexer;
import com.sylvanaar.idea.Lua.lang.lexer.LuaTokenTypes;
import com.sylvanaar.idea.Lua.lang.psi.LuaNamedElement;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* @author ven
*/
public class LuaFindUsagesProvider implements FindUsagesProvider {
@NotNull private static final DefaultWordsScanner DEFAULT_WORDS_SCANNER =
new DefaultWordsScanner(new LuaFlexLexer(),
LuaTokenTypes.IDENTIFIERS_SET, LuaTokenTypes.COMMENT_SET, LuaTokenTypes.LITERALS_SET) {{
setMayHaveFileRefsInLiterals(true);
}};
@NotNull
public WordsScanner getWordsScanner() {
return DEFAULT_WORDS_SCANNER;
}
public boolean canFindUsagesFor(@NotNull final PsiElement psiElement) {
return psiElement instanceof LuaNamedElement;
}
@Nullable
public String getHelpId(@NotNull final PsiElement psiElement) {
return null;
}
@NotNull
public String getType(@NotNull final PsiElement element) {
return "identifier";
}
@NotNull
public String getDescriptiveName(@NotNull final PsiElement element) {
return getName(element);
}
@NotNull
public String getNodeText(@NotNull final PsiElement element, final boolean useFullName) {
final StringBuilder sb = new StringBuilder(getType(element));
if (sb.length() > 0) {
sb.append(" ");
}
sb.append(getName(element));
return sb.toString();
}
@NotNull
private String getName(@NotNull final PsiElement element) {
if (element instanceof LuaNamedElement) {
return ((LuaNamedElement) element).getName();
}
return "";
}
} | fix lexer error while indexing files
| src/findUsages/LuaFindUsagesProvider.java | fix lexer error while indexing files | <ide><path>rc/findUsages/LuaFindUsagesProvider.java
<ide> import com.intellij.lang.cacheBuilder.WordsScanner;
<ide> import com.intellij.lang.findUsages.FindUsagesProvider;
<ide> import com.intellij.psi.PsiElement;
<del>import com.sylvanaar.idea.Lua.lang.lexer.LuaFlexLexer;
<add>import com.sylvanaar.idea.Lua.lang.lexer.LuaLexer;
<ide> import com.sylvanaar.idea.Lua.lang.lexer.LuaTokenTypes;
<ide> import com.sylvanaar.idea.Lua.lang.psi.LuaNamedElement;
<ide> import org.jetbrains.annotations.NotNull;
<ide> */
<ide> public class LuaFindUsagesProvider implements FindUsagesProvider {
<ide> @NotNull private static final DefaultWordsScanner DEFAULT_WORDS_SCANNER =
<del> new DefaultWordsScanner(new LuaFlexLexer(),
<add> new DefaultWordsScanner(new LuaLexer(),
<ide> LuaTokenTypes.IDENTIFIERS_SET, LuaTokenTypes.COMMENT_SET, LuaTokenTypes.LITERALS_SET) {{
<ide> setMayHaveFileRefsInLiterals(true);
<ide> }}; |
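The fix swaps the hand-rolled LuaFlexLexer for LuaLexer inside the DefaultWordsScanner; per the commit message, the old lexer was failing while the indexer scanned Lua files for identifier, comment, and literal words. A hypothetical usage sketch of the words scanner (the sample text and lambda are illustrative; processWords and WordOccurrence are the cache-builder API, assuming these accessor names):

WordsScanner scanner = new LuaFindUsagesProvider().getWordsScanner();
scanner.processWords("local foo = 42 -- comment", occurrence -> {
    CharSequence text = occurrence.getBaseText();
    System.out.println(occurrence.getKind() + ": "
            + text.subSequence(occurrence.getStart(), occurrence.getEnd()));
    return true; // keep scanning
});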
|
Java | mit | dea539c09734a6b1231d481ff9b9b829e54803f3 | 0 | gregwym/joos-compiler-java,gregwym/joos-compiler-java | package ca.uwaterloo.joos.codegen;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
import java.util.Stack;
import java.util.logging.Level;
import java.util.logging.Logger;
import ca.uwaterloo.joos.Main;
import ca.uwaterloo.joos.ast.ASTNode;
import ca.uwaterloo.joos.ast.ASTNode.ChildTypeUnmatchException;
import ca.uwaterloo.joos.ast.FileUnit;
import ca.uwaterloo.joos.ast.Modifiers;
import ca.uwaterloo.joos.ast.Modifiers.Modifier;
import ca.uwaterloo.joos.ast.decl.BodyDeclaration;
import ca.uwaterloo.joos.ast.decl.ClassDeclaration;
import ca.uwaterloo.joos.ast.decl.ConstructorDeclaration;
import ca.uwaterloo.joos.ast.decl.FieldDeclaration;
import ca.uwaterloo.joos.ast.decl.LocalVariableDeclaration;
import ca.uwaterloo.joos.ast.decl.MethodDeclaration;
import ca.uwaterloo.joos.ast.decl.OnDemandImport;
import ca.uwaterloo.joos.ast.decl.PackageDeclaration;
import ca.uwaterloo.joos.ast.decl.ParameterDeclaration;
import ca.uwaterloo.joos.ast.decl.SingleImport;
import ca.uwaterloo.joos.ast.decl.TypeDeclaration;
import ca.uwaterloo.joos.ast.decl.VariableDeclaration;
import ca.uwaterloo.joos.ast.expr.AssignmentExpression;
import ca.uwaterloo.joos.ast.expr.ClassCreateExpression;
import ca.uwaterloo.joos.ast.expr.Expression;
import ca.uwaterloo.joos.ast.expr.InfixExpression;
import ca.uwaterloo.joos.ast.expr.InfixExpression.InfixOperator;
import ca.uwaterloo.joos.ast.expr.MethodInvokeExpression;
import ca.uwaterloo.joos.ast.expr.UnaryExpression;
import ca.uwaterloo.joos.ast.expr.name.Name;
import ca.uwaterloo.joos.ast.expr.name.QualifiedName;
import ca.uwaterloo.joos.ast.expr.name.SimpleName;
import ca.uwaterloo.joos.ast.expr.primary.ArrayAccess;
import ca.uwaterloo.joos.ast.expr.primary.ArrayCreate;
import ca.uwaterloo.joos.ast.expr.primary.FieldAccess;
import ca.uwaterloo.joos.ast.expr.primary.LiteralPrimary;
import ca.uwaterloo.joos.ast.expr.primary.LiteralPrimary.LiteralType;
import ca.uwaterloo.joos.ast.expr.primary.Primary;
import ca.uwaterloo.joos.ast.statement.Block;
import ca.uwaterloo.joos.ast.statement.ForStatement;
import ca.uwaterloo.joos.ast.statement.IfStatement;
import ca.uwaterloo.joos.ast.statement.ReturnStatement;
import ca.uwaterloo.joos.ast.statement.WhileStatement;
import ca.uwaterloo.joos.ast.type.ArrayType;
import ca.uwaterloo.joos.ast.type.PrimitiveType;
import ca.uwaterloo.joos.ast.type.PrimitiveType.Primitive;
import ca.uwaterloo.joos.ast.type.ReferenceType;
import ca.uwaterloo.joos.ast.type.Type;
import ca.uwaterloo.joos.checker.HierarchyChecker;
import ca.uwaterloo.joos.symboltable.BlockScope;
import ca.uwaterloo.joos.symboltable.Scope;
import ca.uwaterloo.joos.symboltable.SemanticsVisitor;
import ca.uwaterloo.joos.symboltable.SymbolTable;
import ca.uwaterloo.joos.symboltable.TableEntry;
import ca.uwaterloo.joos.symboltable.TypeScope;
import com.google.common.io.Files;
public class CodeGenerator extends SemanticsVisitor {
public static final Logger logger = Main.getLogger(CodeGenerator.class);
protected static final String BOOLEAN_TRUE = "0xffffffff";
protected static final String BOOLEAN_FALSE = "0x0";
protected static final String NULL = "0x0";
protected File asmFile = null;
protected static File startFile = null;
protected Set<String> externs = null;
protected List<String> texts = null;
protected List<String> data = null;
protected List<String> statics = null;
protected static List<String> staticInit = new ArrayList<String>();
private String methodLabel = null;
private Integer literalCount = 0;
private Integer comparisonCount = 0;
private Integer loopCount = 0;
private Integer conditionCount = 0;
private Boolean dereferenceVariable = true;
private Boolean referenceCurrentObject = true;
private Set<Class<?>> complexNodes = null;
public CodeGenerator(SymbolTable table) {
super(table);
logger.setLevel(Level.FINER);
this.complexNodes = new HashSet<Class<?>>();
this.complexNodes.add(ReferenceType.class);
this.complexNodes.add(PackageDeclaration.class);
this.complexNodes.add(SingleImport.class);
this.complexNodes.add(OnDemandImport.class);
}
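	// Resets all per-file code generation state. Called once per FileUnit so
	// that labels, literal counters and the extern/.text/.data buffers never
	// leak between generated .s files.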
private void initialize() {
this.asmFile = null;
this.statics = new ArrayList<String>();
this.externs = new HashSet<String>();
this.texts = new ArrayList<String>();
this.data = new ArrayList<String>();
this.methodLabel = null;
this.literalCount = 0;
this.comparisonCount = 0;
this.loopCount = 0;
this.dereferenceVariable = true;
this.referenceCurrentObject = true;
// Place the runtime.s externs
this.externs.add("__malloc");
this.externs.add("__debexit");
this.externs.add("__exception");
this.externs.add("NATIVEjava.io.OutputStream.nativeWrite");
this.externs.add("SubtypeTable");
this.texts.add("");
this.texts.add("section .text");
this.texts.add("");
this.data.add("");
this.data.add("section .data");
this.data.add("");
}
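	// Declares the given label as extern, but only when the referenced symbol
	// is not defined in the current type's own scope (a locally defined
	// symbol must not also be declared extern).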
private void addExtern(String label, String declarationName) {
if (!this.getCurrentScope().getParentTypeScope().getSymbols().containsKey(declarationName)) {
logger.fine("Adding extern " + declarationName + " within scope " + this.getCurrentScope().getParentTypeScope());
this.externs.add(label);
}
}
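	// Externs another type's VTable label; the current type's own VTable is
	// emitted locally at the end of didVisit() instead.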
private void addVtable(String fullyQualifiedTypeName) {
if (!this.getCurrentScope().getParentTypeScope().getName().equals(fullyQualifiedTypeName)) {
this.externs.add(fullyQualifiedTypeName + "_VTABLE");
}
}
@Override
public void willVisit(ASTNode node) throws Exception {
super.willVisit(node);
if (node instanceof FileUnit) {
this.initialize();
} else if (node instanceof TypeDeclaration) {
// Construct output file
// String filename = this.getCurrentScope().getName();
String filename = node.getIdentifier();
// filename = filename.replace('.', '/');
filename = "./output/" + filename + ".s";
logger.finer(filename);
this.asmFile = new File(filename);
} else if (node instanceof MethodDeclaration) {
Modifiers modifiers = ((MethodDeclaration) node).getModifiers();
if (!modifiers.containModifier(Modifier.NATIVE) && !modifiers.containModifier(Modifier.ABSTRACT)) {
// Define method labels
this.methodLabel = methodLabel(this.getCurrentScope().getName());
if (((MethodDeclaration) node).getName().getSimpleName().equals("test") && modifiers.containModifier(Modifier.STATIC)) {
this.methodLabel = "_start";
startFile = this.asmFile;
}
this.texts.add("global " + this.methodLabel);
this.texts.add(this.methodLabel + ":");
// Preamble
this.texts.add("push ebp\t\t\t; Preamble");
this.texts.add("mov ebp, esp");
// Allocate space for local variables
this.texts.add("sub esp, " + (((MethodDeclaration) node).totalLocalVariables * 4));
// Push registers
// this.texts.add("push eax"); // Leave eax as return value
this.texts.add("push ebx");
this.texts.add("push ecx");
this.texts.add("push edx");
this.texts.add("");
if (((MethodDeclaration) node).getName().getSimpleName().equals("test") && modifiers.containModifier(Modifier.STATIC)) {
this.texts.add("call Start_StaticInit");
}
if (node instanceof ConstructorDeclaration) {
// Call any superclass constructor
// TODO call super constructor...
// Get the class holding the constructor
ClassDeclaration cd = (ClassDeclaration) this.getCurrentScope().getParentTypeScope().getReferenceNode();
List<FieldDeclaration> fds = cd.getBody().getFields();
this.texts.add("push eax\t\t\t; Push the new object address");
this.texts.add("mov ebx, [ebp + 8]\t\t; Current Object");
this.texts.add("add ebx, 4\t\t\t; First space reserved");
// Initialize field variables here
for (FieldDeclaration fd : fds) {
if (fd.getInitial() != null) {
this.texts.add("push ebx\t\t\t; Push address of field");
fd.getInitial().accept(this);
this.texts.add("pop ebx\t\t\t; Pop LHS");
this.texts.add("mov [ebx], eax");
}
this.texts.add("add ebx, 4\t\t\t; ");
}
}
}
}
}
@Override
public boolean visit(ASTNode node) throws Exception {
logger.finest("Visiting " + node);
if (node instanceof MethodInvokeExpression) {
this.generateMethodInvoke((MethodInvokeExpression) node);
return false;
} else if (node instanceof ClassCreateExpression) {
this.generateClassCreate((ClassCreateExpression) node);
return false;
} else if (node instanceof InfixExpression) {
this.generateInfixExpression((InfixExpression) node);
return false;
} else if (node instanceof ArrayCreate) {
this.generateArrayCreate((ArrayCreate) node);
return false;
} else if (node instanceof ArrayAccess) {
this.generateArrayAccess((ArrayAccess) node);
return false;
} else if (node instanceof LiteralPrimary) {
this.generateLiteral((LiteralPrimary) node);
return false;
} else if (node instanceof UnaryExpression) {
this.generateUnaryExpression((UnaryExpression) node);
return false;
} else if (node instanceof LocalVariableDeclaration) {
this.generateLocalVariableDeclaration((LocalVariableDeclaration) node);
return false;
} else if (node instanceof AssignmentExpression) {
this.generateAssignmentExpression((AssignmentExpression) node);
return false;
} else if (node instanceof ForStatement) {
this.generateForLoop((ForStatement) node);
return false;
} else if (node instanceof WhileStatement) {
this.generateWhileStatement((WhileStatement) node);
return false;
} else if (node instanceof IfStatement) {
this.generateIfStatement((IfStatement) node);
return false;
} else if (node instanceof FieldDeclaration) {
if (((FieldDeclaration) node).getModifiers().containModifier(Modifier.STATIC)) {
this.generateStaticFieldDeclaration((FieldDeclaration) node);
}
return false;
} else if (node instanceof FieldAccess) {
this.generateFieldAccess((FieldAccess) node);
return false;
} else if (node instanceof MethodDeclaration) {
Block body = ((MethodDeclaration) node).getBody();
if (body != null) {
body.accept(this);
}
return false;
} else if (node instanceof Name) {
this.generateVariableAccess((Name) node);
return false;
}
return !this.complexNodes.contains(node.getClass());
}
@Override
public void didVisit(ASTNode node) throws Exception {
if (node instanceof FileUnit) {
// File content generated, write to file
File dir = this.asmFile.getParentFile();
for (String label : this.statics) {
staticInit.add("\t" + label + "_INIT\n");
staticInit.add("\tcall " + label + "_INIT" + '\n');
}
if (dir != null) {
dir.mkdirs();
}
this.asmFile.createNewFile();
BufferedWriter asmWriter = new BufferedWriter(new FileWriter(this.asmFile));
for (String line : this.externs) {
asmWriter.write("extern " + line);
asmWriter.newLine();
}
for (String line : this.texts) {
if (!line.startsWith("global") && !line.startsWith("section")) {
line = "\t" + line;
if (!line.endsWith(":")) {
line = "\t" + line;
}
}
asmWriter.write(line);
asmWriter.newLine();
}
for (String line : this.data) {
asmWriter.write(line);
asmWriter.newLine();
}
asmWriter.close();
} else if (node instanceof TypeDeclaration) {
this.texts.add("global " + this.getCurrentScope().getName() + "_VTABLE");
this.texts.add(this.getCurrentScope().getName() + "_VTABLE:");
this.texts.add("dd " + ((TypeDeclaration) node).getHierarchyTableIndex());
for (Entry<Integer, Scope> entry : ((TypeDeclaration) node).getSignatures().entrySet()) {
Scope methodScope = entry.getValue();
if (!this.getCurrentScope().getSymbols().containsKey(methodScope.getName())) {
this.externs.add(methodLabel(methodScope.getName()));
}
if (((MethodDeclaration) methodScope.getReferenceNode()).getModifiers().containModifier(Modifier.STATIC) && ((MethodDeclaration) methodScope.getReferenceNode()).getName().getName().equals("test")) {
this.externs.add("Start_StaticInit");
this.texts.add("dd _start");
startFile = asmFile;
} else
this.texts.add("dd " + methodLabel(methodScope.getName()));
}
this.texts.add("");
} else if (node instanceof MethodDeclaration) {
Modifiers modifiers = ((MethodDeclaration) node).getModifiers();
if (node instanceof ConstructorDeclaration) {
this.texts.add("pop eax\t\t\t; Restore THIS pointer to eax");
}
if (!modifiers.containModifier(Modifier.NATIVE) && !modifiers.containModifier(Modifier.ABSTRACT)) {
// Postamble
this.texts.add(this.methodLabel + "_END:");
// Pop registers
this.texts.add("pop edx\t\t\t\t; Postamble");
this.texts.add("pop ecx");
this.texts.add("pop ebx");
// this.texts.add("pop eax"); // Leave eax as return value
// Deallocate space for local variables
this.texts.add("add esp, " + (((MethodDeclaration) node).totalLocalVariables * 4));
// Restore frame pointer
this.texts.add("pop ebp");
if (this.methodLabel.equals("_start")) {
this.texts.add("call __debexit");
} else {
this.texts.add("ret");
}
this.texts.add("");
}
} else if (node instanceof ReturnStatement) {
this.texts.add("jmp " + this.methodLabel + "_END");
}
super.didVisit(node);
}
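	// Mangles a method signature into an assembler-safe label: '(', ')' and
	// ',' become '_' and "[]" becomes "_ARRAY". For example, an illustrative
	// signature "A.foo(INT,java.lang.String[],)" becomes
	// "A.foo_INT_java.lang.String_ARRAY__".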
private static String methodLabel(String methodSignature) {
String label = methodSignature.replaceAll("[(),]", "_");
label = label.replaceAll("\\[\\]", "_ARRAY");
return label;
}
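	// Prefixes a fully qualified field name to build its data label, e.g. an
	// illustrative field "A.counter" becomes "STATICA.counter".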
private static String staticLabel(String fieldName) {
String label = "STATIC" + fieldName;
return label;
}
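	// Loads the value of a variable into eax. Parameters live at
	// [ebp + 4 + 4*index], locals at [ebp - 4*index], static fields behind
	// their STATIC label, and instance fields at [eax + 4*index], assuming
	// eax already holds the owning object.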
private void generateVariableDereference(TableEntry entry) throws Exception {
VariableDeclaration varDecl = (VariableDeclaration) entry.getNode();
if (varDecl instanceof ParameterDeclaration) {
this.texts.add("mov eax, [ebp + " + (4 + varDecl.getIndex() * 4) + "]\t; Accessing parameter: " + entry.getName());
} else if (varDecl instanceof FieldDeclaration) {
if (varDecl.getModifiers().containModifier(Modifier.STATIC)) {
String label = staticLabel(entry.getName());
this.addExtern(label, entry.getName());
this.texts.add("mov eax, [" + label + "]\t; Accessing static: " + entry.getName());
} else {
this.texts.add("mov eax, [eax + " + (varDecl.getIndex() * 4) + "]\t; Accessing field: " + entry.getName());
}
} else if (varDecl instanceof LocalVariableDeclaration) {
this.texts.add("mov eax, [ebp - " + (varDecl.getIndex() * 4) + "]\t; Accessing local: " + entry.getName());
}
}
private void generateVariableAddress(TableEntry entry) throws Exception {
VariableDeclaration varDecl = (VariableDeclaration) entry.getNode();
if (varDecl instanceof ParameterDeclaration) {
this.texts.add("mov eax, ebp");
this.texts.add("add eax, " + (4 + varDecl.getIndex() * 4) + "\t\t\t; Address of parameter: " + entry.getName());
} else if (varDecl instanceof FieldDeclaration) {
if (varDecl.getModifiers().containModifier(Modifier.STATIC)) {
String label = staticLabel(entry.getName());
this.addExtern(label, entry.getName());
this.texts.add("mov eax, " + label + "\t; Address of static: " + entry.getName());
} else {
this.texts.add("add eax, " + (varDecl.getIndex() * 4) + "\t\t\t; Address of field: " + entry.getName());
}
} else if (varDecl instanceof LocalVariableDeclaration) {
this.texts.add("mov eax, ebp");
this.texts.add("sub eax, " + (varDecl.getIndex() * 4) + "\t\t\t; Address of local: " + entry.getName());
}
}
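	// Resolves a simple or qualified name. THIS is first loaded from
	// [ebp + 8] (unless referenceCurrentObject was cleared by a field
	// access), intermediate components are dereferenced one by one, and the
	// last component yields either a value or an lvalue address depending on
	// dereferenceVariable. The implicit array field "length" is read from
	// offset 0 of the array.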
private void generateVariableAccess(Name name) throws Exception {
int i = 0;
if (name instanceof Name && this.referenceCurrentObject) {
this.texts.add("mov eax, [ebp + 8]\t; Current object");
}
if (name instanceof SimpleName) {
TableEntry entry = ((SimpleName) name).getOriginalDeclaration();
if (entry == null) {
String field = ((SimpleName) name).getName();
if (field.equals("length")) {
this.texts.add("mov eax, [eax]\t\t; Fetch array length");
} else {
throw new Exception("Unknown field " + field);
}
} else if (this.dereferenceVariable) {
this.generateVariableDereference(entry);
} else {
this.generateVariableAddress(entry);
}
} else if (name instanceof QualifiedName) {
TableEntry entry = ((QualifiedName) name).getOriginalDeclaration();
if (entry.getNode() instanceof VariableDeclaration) {
this.generateVariableDereference(entry);
}
List<TableEntry> originalDeclarations = ((QualifiedName) name).originalDeclarations;
for (i = 0; i < originalDeclarations.size(); i++) {
entry = originalDeclarations.get(i);
if (i != originalDeclarations.size() - 1 || this.dereferenceVariable) {
this.generateVariableDereference(entry);
} else {
this.generateVariableAddress(entry);
}
}
List<String> components = ((QualifiedName) name).getComponents();
if (components.size() - originalDeclarations.size() > 1) {
String field = components.get(components.size() - 1);
if (field.equals("length")) {
this.texts.add("mov eax, [eax]\t\t; Fetch array size");
} else {
throw new Exception("Unknown field " + field);
}
}
}
}
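	// Emits a call site under this backend's convention: arguments pushed
	// right to left, THIS pushed last as parameter #0, dispatch through the
	// VTable whose slot 0 holds the hierarchy index. An illustrative call
	// site looks like:
	//   push <arg2>
	//   push <arg1>
	//   push eax                 ; THIS
	//   mov edx, [eax]           ; VTable (or the _VTABLE label for statics)
	//   call [edx + 4*index + 4]
	//   pop edx ...              ; caller pops THIS and the arguments
	// OutputStream.nativeWrite is special-cased to the runtime's NATIVE entry.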
private void generateMethodInvoke(MethodInvokeExpression methodInvoke) throws Exception {
// Push parameters to stack
List<Expression> args = methodInvoke.getArguments();
int i = args.size();
for (i--; i >= 0; i--) {
Expression arg = args.get(i);
// Generate code for arg
arg.accept(this);
this.texts.add("push eax\t\t\t; Push parameter #" + (i + 1) + " to stack");
}
String methodName = methodInvoke.fullyQualifiedName;
String methodLabel = methodLabel(methodName);
if (methodLabel.equals("java.io.OutputStream.nativeWrite_INT__")) {
// Calling native write
methodLabel = "NATIVEjava.io.OutputStream.nativeWrite";
this.texts.add("pop eax\t\t\t\t; Pop parameter for native write");
this.texts.add("push ebx");
this.texts.add("push ecx");
this.texts.add("push edx");
this.texts.add("call " + methodLabel);
this.texts.add("pop edx");
this.texts.add("pop ecx");
this.texts.add("pop ebx");
this.texts.add("");
return;
}
// Push THIS to stack, THIS should be the address of the object
Primary primary = methodInvoke.getPrimary();
Name name = methodInvoke.getName();
if (primary != null) {
			// If primary is not null, the method is being invoked on a primary expression
primary.accept(this);
} else if (name instanceof QualifiedName) {
logger.finest("Generating method invoke for name " + name + " with #" + ((QualifiedName) name).originalDeclarations.size() + " entries");
this.texts.add("mov eax, [ebp + 8]\t; Current object");
List<TableEntry> originalDeclarations = ((QualifiedName) name).originalDeclarations;
for (TableEntry entry : originalDeclarations) {
this.generateVariableDereference(entry);
}
} else if (name instanceof SimpleName) {
// Invoking method within same Type, THIS is parameter #0
logger.finest("Generating method invoke for simple name " + name);
this.texts.add("mov eax, [ebp + 8]\t; Current object");
}
this.texts.add("push eax\t\t\t; Push THIS as parameter #0");
// Invoke the method
BlockScope methodBlock = this.table.getBlock(methodInvoke.fullyQualifiedName);
BodyDeclaration methodNode = (BodyDeclaration) methodBlock.getReferenceNode();
if (methodNode.getModifiers().containModifier(Modifier.STATIC)) {
this.texts.add("mov edx, " + methodBlock.getParentTypeScope().getName() + "_VTABLE");
} else {
this.texts.add("mov edx, [eax]\t; Dereference for the address of VTable");
}
this.texts.add("call [edx + " + (methodNode.getIndex() * 4 + 4) + "]\t; Call " + methodInvoke.fullyQualifiedName);
// Pop THIS from stack
this.texts.add("pop edx\t\t\t\t; Pop THIS");
// Pop parameters from stack
for (i = 0; i < args.size(); i++) {
this.texts.add("pop edx\t\t\t\t; Pop parameter #" + (i + 1) + " from stack");
}
// Add the vtable containing the method label
this.addVtable(methodBlock.getParentTypeScope().getName());
this.texts.add("");
}
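	// Allocates and initializes a new object. The layout is a 4-byte VTable
	// pointer followed by one 4-byte slot per field, hence the allocation
	// size of 4 + 4*totalFieldDeclarations; the constructor is then called
	// with the fresh object on the stack as THIS.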
private void generateClassCreate(ClassCreateExpression classCreate) throws Exception {
// Push parameters to stack
List<Expression> args = classCreate.getArguments();
int i = args.size();
for (i--; i >= 0; i--) {
Expression arg = args.get(i);
// Generate code for arg
arg.accept(this);
this.texts.add("push eax\t\t\t; Push parameter #" + i + " to stack");
}
// Allocate space for the new object
TypeScope typeScope = this.table.getType(classCreate.getType().getFullyQualifiedName());
TypeDeclaration typeDecl = (TypeDeclaration) typeScope.getReferenceNode();
this.texts.add("mov eax, " + (4 + typeDecl.totalFieldDeclarations * 4) + "\t\t\t; Size of the object");
this.texts.add("call __malloc");
this.texts.add("push eax\t\t\t; Push new object pointer as THIS");
this.addVtable(typeScope.getName());
this.texts.add("mov ebx, " + typeScope.getName() + "_VTABLE");
this.texts.add("mov [eax], ebx");
// Invoke the constructor
String constructorName = classCreate.fullyQualifiedName;
String constructorLabel = methodLabel(constructorName);
this.texts.add("call " + constructorLabel);
// Pop THIS from stack
this.texts.add("pop edx\t\t\t\t; Pop THIS");
// Pop parameters from stack
for (i = 0; i < args.size(); i++) {
this.texts.add("pop edx\t\t\t\t; Pop parameters #" + i + " from stack");
}
// Add to extern if is not local method
if (!this.getCurrentScope().getParentTypeScope().getSymbols().containsKey(constructorName)) {
this.externs.add(constructorLabel);
}
this.texts.add("");
}
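	// Converts the value in eax to a java.lang.String. Primitives go through
	// the matching java.lang.String.valueOf overload; references dispatch
	// toString through the VTable slot at offset 12, which this backend
	// presumably reserves for toString in every VTable.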
private void generateValueToString(Type exprType) throws Exception {
if (exprType instanceof PrimitiveType) {
String valueOfLabel = "java.lang.String.valueOf_" + exprType.getFullyQualifiedName() + "__";
this.texts.add("push eax\t\t\t; Push the primitive as perameter #1");
this.texts.add("push eax\t\t\t; Push something as a fake THIS");
this.texts.add("call " + valueOfLabel);
this.addExtern(valueOfLabel, "java.lang.String.valueOf(" + exprType.getFullyQualifiedName() + ",)");
this.texts.add("pop edx");
this.texts.add("pop edx");
} else if (exprType instanceof ReferenceType) {
this.texts.add("push eax\t\t\t; Push the reference variable address as THIS");
this.texts.add("mov eax, [eax]\t; Obtain VTable address");
this.texts.add("add eax, 12\t\t; Shift to the index of toString");
this.texts.add("mov eax, [eax]\t; Dereference address of toString");
this.texts.add("call eax");
this.texts.add("pop edx");
}
}
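	// String '+' support: whichever operand is not already a String is
	// converted via generateValueToString, then
	// java.lang.String.concat(java.lang.String) is invoked with op1 as THIS
	// and op2 as the argument, leaving the result in eax.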
private void generateStringConcat(Expression op1, Expression op2) throws Exception {
Type op1Type = op1.exprType;
Type op2Type = op2.exprType;
// op1 at EAX, op2 at EDX
if (!op1Type.getFullyQualifiedName().equals("java.lang.String")) {
this.texts.add("push edx\t\t\t; Push op2 to stack first");
this.generateValueToString(op1Type);
this.texts.add("pop edx\t\t\t\t; Pop op2");
}
// op1 String on stack, op2 at EDX
if (!op2Type.getFullyQualifiedName().equals("java.lang.String")) {
this.texts.add("push eax\t\t\t; Push op1 String to stack");
this.texts.add("mov eax, edx");
this.generateValueToString(op2Type);
this.texts.add("mov edx, eax");
this.texts.add("pop eax\t\t\t\t; Pop op1 String");
}
// op1 String at EAX, op2 String at EDX. Invoke
// java.lang.String.concat(java.lang.String) now.
this.texts.add("push edx");
this.texts.add("push eax");
this.texts.add("call java.lang.String.concat_java.lang.String__");
this.addExtern("java.lang.String.concat_java.lang.String__", "java.lang.String.concat(java.lang.String,)");
this.texts.add("pop edx");
this.texts.add("pop edx");
}
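	// Binary operators. Except for the lazy && and ||, the second operand is
	// evaluated first and pushed, the first ends up in eax, and the second
	// is popped into edx. instanceof is answered by a SubtypeTable lookup at
	// element (objectIndex * totalClasses + rhsIndex), scaled to a byte
	// offset. Comparisons emit a branchy pattern of the illustrative shape:
	//   cmp eax, edx
	//   jCC __COMPARISON_TRUE_n
	//   mov eax, 0x0
	//   jmp __COMPARISON_FALSE_n
	//   __COMPARISON_TRUE_n: mov eax, 0xffffffff
	//   __COMPARISON_FALSE_n:               ; join point, despite the name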
private void generateInfixExpression(InfixExpression infixExpr) throws Exception {
InfixOperator operator = infixExpr.getOperator();
List<Expression> operands = infixExpr.getOperands();
int comparisonCount = this.comparisonCount;
this.comparisonCount++;
// Instance of
if (operator.equals(InfixOperator.INSTANCEOF)) {
Expression operand = operands.get(0);
Type rhsType = infixExpr.getRHS();
operand.accept(this);
this.texts.add("mov eax,[eax]\t\t\t; get Vtable of current object");
this.texts.add("mov eax,[eax]\t\t\t; get the index of current object");
TypeScope typeScope = this.table.getType(rhsType.getFullyQualifiedName());
TypeDeclaration typeNode = (TypeDeclaration) typeScope.getReferenceNode();
this.texts.add("mov ebx," + typeNode.getHierarchyTableIndex() + "\t; get the index of RHS of instanceof");
this.texts.add("mov edx," + HierarchyChecker.getTotalClassNum() + "\t; get the fixed shift");
this.texts.add("mov ecx, SubtypeTable\t; get the subtypeTable");
this.texts.add("imul eax, edx\t; Multiply row with row width");
this.texts.add("add eax, ecx");
this.texts.add("add eax, ebx");
this.texts.add("mov eax, [eax]\t; get the subtype flag value");
return;
}
if (operator.equals(InfixOperator.AND) || operator.equals(InfixOperator.OR)) {
operands.get(0).accept(this);
} else {
// Generate code for the second operand and push to the stack
operands.get(1).accept(this);
this.texts.add("push eax\t\t\t; Push second operand value");
// Generate code for the first operand and result stay in eax
operands.get(0).accept(this);
this.texts.add("pop edx\t\t\t\t; Pop second operand value to edx");
}
switch (operator) {
case AND:
this.texts.add("cmp eax, " + BOOLEAN_FALSE);
this.texts.add("je " + "__COMPARISON_FALSE_" + comparisonCount + "\t; If Lazy AND reach false, skip second operands");
operands.get(1).accept(this);
this.texts.add("__COMPARISON_FALSE_" + comparisonCount + ":");
break;
case BAND:
this.texts.add("and eax, edx");
break;
case BOR:
this.texts.add("or eax, edx");
break;
case EQ:
this.texts.add("cmp eax, edx");
this.texts.add("je " + "__COMPARISON_TRUE_" + comparisonCount);
this.texts.add("mov eax, " + BOOLEAN_FALSE);
this.texts.add("jmp " + "__COMPARISON_FALSE_" + comparisonCount);
this.texts.add("__COMPARISON_TRUE_" + comparisonCount + ":");
this.texts.add("mov eax, " + BOOLEAN_TRUE);
this.texts.add("__COMPARISON_FALSE_" + comparisonCount + ":");
break;
case GEQ:
this.texts.add("cmp eax, edx");
this.texts.add("jge " + "__COMPARISON_TRUE_" + comparisonCount);
this.texts.add("mov eax, " + BOOLEAN_FALSE);
this.texts.add("jmp " + "__COMPARISON_FALSE_" + comparisonCount);
this.texts.add("__COMPARISON_TRUE_" + comparisonCount + ":");
this.texts.add("mov eax, " + BOOLEAN_TRUE);
this.texts.add("__COMPARISON_FALSE_" + comparisonCount + ":");
break;
case GT:
this.texts.add("cmp eax, edx");
this.texts.add("jg " + "__COMPARISON_TRUE_" + comparisonCount);
this.texts.add("mov eax, " + BOOLEAN_FALSE);
this.texts.add("jmp " + "__COMPARISON_FALSE_" + comparisonCount);
this.texts.add("__COMPARISON_TRUE_" + comparisonCount + ":");
this.texts.add("mov eax, " + BOOLEAN_TRUE);
this.texts.add("__COMPARISON_FALSE_" + comparisonCount + ":");
break;
case LEQ:
this.texts.add("cmp eax, edx");
this.texts.add("jle " + "__COMPARISON_TRUE_" + comparisonCount);
this.texts.add("mov eax, " + BOOLEAN_FALSE);
this.texts.add("jmp " + "__COMPARISON_FALSE_" + comparisonCount);
this.texts.add("__COMPARISON_TRUE_" + comparisonCount + ":");
this.texts.add("mov eax, " + BOOLEAN_TRUE);
this.texts.add("__COMPARISON_FALSE_" + comparisonCount + ":");
break;
case LT:
this.texts.add("cmp eax, edx");
this.texts.add("jl " + "__COMPARISON_TRUE_" + comparisonCount);
this.texts.add("mov eax, " + BOOLEAN_FALSE);
this.texts.add("jmp " + "__COMPARISON_FALSE_" + comparisonCount);
this.texts.add("__COMPARISON_TRUE_" + comparisonCount + ":");
this.texts.add("mov eax, " + BOOLEAN_TRUE);
this.texts.add("__COMPARISON_FALSE_" + comparisonCount + ":");
break;
case MINUS:
// eax = first operand - second operand
this.texts.add("sub eax, edx");
break;
case NEQ:
this.texts.add("cmp eax, edx");
this.texts.add("jne " + "__COMPARISON_TRUE_" + comparisonCount);
this.texts.add("mov eax, " + BOOLEAN_FALSE);
this.texts.add("jmp " + "__COMPARISON_FALSE_" + comparisonCount);
this.texts.add("__COMPARISON_TRUE_" + comparisonCount + ":");
this.texts.add("mov eax, " + BOOLEAN_TRUE);
this.texts.add("__COMPARISON_FALSE_" + comparisonCount + ":");
break;
case OR:
this.texts.add("cmp eax, " + BOOLEAN_FALSE);
this.texts.add("jne " + "__COMPARISON_TRUE_" + comparisonCount + "\t; If Lazy OR reach true, skip second operands");
operands.get(1).accept(this);
this.texts.add("__COMPARISON_TRUE_" + comparisonCount + ":");
break;
case PERCENT:
// eax = first operand % second operand
this.texts.add("cmp edx, 0\t\t\t; Check zero divider");
this.texts.add("je __exception\t\t; Throw exception");
this.texts.add("mov ebx, 0");
this.texts.add("xchg edx, ebx\t\t; Set edx to 0, and ebx to be the divider");
this.texts.add("idiv ebx\t\t\t; Divide edx:eax with ebx");
this.texts.add("mov eax, edx\t\t; Move the remainder to eax");
break;
case PLUS:
// eax = first operand + second operand
if (operands.get(0).exprType.getFullyQualifiedName().equals("java.lang.String") || operands.get(1).exprType.getFullyQualifiedName().equals("java.lang.String")) {
this.generateStringConcat(operands.get(0), operands.get(1));
} else {
this.texts.add("add eax, edx");
}
break;
case SLASH:
// eax = first operand / second operand
this.texts.add("cmp edx, 0\t\t\t; Check zero divider");
this.texts.add("je __exception\t\t; Throw exception");
this.texts.add("mov ebx, 0");
this.texts.add("xchg edx, ebx\t\t; Set edx to 0, and ebx to be the divider");
this.texts.add("idiv ebx\t\t\t; Divide edx:eax with ebx, quotient will be in eax");
break;
case STAR:
// eax = first operand * second operand
this.texts.add("imul eax, edx");
break;
default:
throw new Exception("Unkown infix operator type " + operator);
}
this.texts.add("");
}
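	// Arrays are laid out as a 4-byte length word followed by the elements:
	// 1 byte per element for char arrays, 4 bytes for everything else.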
private void generateArrayCreate(ArrayCreate arrayCreate) throws Exception {
Type arrayElementType = arrayCreate.getType().getType();
arrayCreate.getDimension().accept(this);
this.texts.add("push eax\t\t\t; Push array dimension to stack");
if (!(arrayElementType instanceof PrimitiveType && ((PrimitiveType) arrayElementType).getPrimitive().equals(Primitive.CHAR))) {
this.texts.add("imul eax, 4\t\t\t; Multiply the array dimension by byte size");
}
this.texts.add("add eax, 4\t\t\t; Extra space to store array length");
this.texts.add("call __malloc\t\t; Malloc space for the array");
this.texts.add("pop ebx\t\t\t\t; Pop array dimension");
this.texts.add("mov [eax], ebx\t\t; Save array dimension to the beginning of array");
}
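	// Computes base + 4 (skipping the length word) + scaled index, then
	// either dereferences the element or leaves its address in eax depending
	// on dereferenceVariable. Char indices are left unscaled since char
	// elements occupy one byte (see the TODO below about using al for char
	// stores).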
private void generateArrayAccess(ArrayAccess arrayAccess) throws Exception {
boolean originDereferenceSetting = this.dereferenceVariable;
this.dereferenceVariable = true;
arrayAccess.getExpression().accept(this);
this.texts.add("push eax\t\t\t; Push array address to stack first");
arrayAccess.getIndex().accept(this);
this.texts.add("pop edx");
this.texts.add("add edx, 4\t\t\t; Shift for array length");
Type arrayElementType = ((ArrayType) arrayAccess.exprType).getType();
if (!(arrayElementType instanceof PrimitiveType && ((PrimitiveType) arrayElementType).getPrimitive().equals(Primitive.CHAR))) {
this.texts.add("imul eax, 4\t\t\t; Multiply the array index by byte size");
}
this.texts.add("add edx, eax\t\t; Shift to index eax");
this.dereferenceVariable = originDereferenceSetting;
if (this.dereferenceVariable) {
this.texts.add("mov eax, [edx]\t\t; Dereference the array element");
} else {
this.texts.add("mov eax, edx\t\t; Address of the array element");
}
}
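	// Literals: booleans become 0xffffffff/0x0, char literals are decoded
	// (including backslash escapes) to their numeric value, null maps to the
	// shared __NULL_LIT_ symbol, and each string literal becomes a static
	// java.lang.String object emitted into .data.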
private void generateLiteral(LiteralPrimary literal) throws Exception {
char c = '\0';
switch (literal.getLiteralType()) {
case BOOLLIT:
if (literal.getValue().equals("true")) {
this.texts.add("mov eax, " + BOOLEAN_TRUE);
} else {
this.texts.add("mov eax, " + BOOLEAN_FALSE);
}
break;
case CHARLIT:
c = literal.getValue().charAt(1);
if (c == '\\' && literal.getValue().length() > 3) {
c = literal.getValue().charAt(2);
if (c == 'b')
c = '\b';
else if (c == 't')
c = '\t';
else if (c == 'n')
c = '\n';
else if (c == 'f')
c = '\f';
else if (c == 'r')
c = '\r';
else if (c == '"')
c = '"';
else if (c == '\'')
c = '\'';
else if (c == '\\')
c = '\\';
else
c = '\0';
}
this.texts.add("mov eax, " + ((int) c));
break;
case INTLIT:
			// Assuming the int literal is within integer range
this.texts.add("mov eax, " + Integer.valueOf(literal.getValue()));
break;
case NULL:
this.texts.add("mov eax, __NULL_LIT_");
this.addExtern("__NULL_LIT_", "");
break;
case STRINGLIT:
this.addVtable("java.lang.String");
this.data.add("__STRING_" + this.literalCount + " dd java.lang.String_VTABLE");
this.data.add("dd " + "__STRING_LIT_" + this.literalCount);
this.data.add("__STRING_LIT_" + this.literalCount + " dd " + (literal.getValue().length() - 2));
this.data.add("dd " + literal.getValue());
this.data.add("align 4");
this.texts.add("mov eax, " + "__STRING_" + this.literalCount);
this.literalCount++;
break;
default:
break;
}
}
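	// Unary minus folds negated int/char literals at compile time, which
	// also keeps -2147483648 parseable; other operands are negated in eax.
	// Bitwise NOT doubles as logical not because booleans are all-ones or
	// all-zeros.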
private void generateUnaryExpression(UnaryExpression unaryExpr) throws Exception {
Expression operand = unaryExpr.getOperand();
switch (unaryExpr.getOperator()) {
case MINUS:
if (operand instanceof LiteralPrimary) {
				// Assuming the operand is an int or char literal
if (((LiteralPrimary) operand).getLiteralType().equals(LiteralType.INTLIT)) {
this.texts.add("mov eax, " + Integer.parseInt("-" + ((LiteralPrimary) operand).getValue()));
}
if (((LiteralPrimary) operand).getLiteralType().equals(LiteralType.CHARLIT)) {
int charNumber = (int) ((LiteralPrimary) operand).getValue().charAt(1);
this.texts.add("mov eax, " + Integer.parseInt("-" + charNumber));
}
} else {
operand.accept(this);
this.texts.add("neg eax");
}
break;
case NOT:
operand.accept(this);
this.texts.add("not eax");
break;
default:
break;
}
}
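	// Initializers and assignments share one store pattern: the LHS address
	// is computed with dereferenceVariable cleared and pushed, the RHS is
	// evaluated into eax, and the value is stored through the popped address.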
private void generateLocalVariableDeclaration(LocalVariableDeclaration decl) throws Exception {
Expression initialization = decl.getInitial();
if (initialization != null) {
this.dereferenceVariable = false;
decl.getName().accept(this);
this.dereferenceVariable = true;
this.texts.add("push eax\t\t\t; Push LHS to stack");
initialization.accept(this);
this.texts.add("pop ebx");
this.texts.add("mov [ebx], eax");
this.texts.add("");
}
}
private void generateAssignmentExpression(AssignmentExpression assignExpr) throws Exception {
this.dereferenceVariable = false;
((ASTNode) assignExpr.getLeftHand()).accept(this);
this.dereferenceVariable = true;
this.texts.add("push eax\t\t\t; Push LHS to stack");
assignExpr.getExpression().accept(this);
this.texts.add("pop ebx");
// TODO: Assignment to char array should use `al`
this.texts.add("mov [ebx], eax");
this.texts.add("");
}
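	// Loops and branches compare the condition in eax against BOOLEAN_FALSE
	// and use numbered __LOOP_* / __IF_* labels so that nested statements
	// cannot collide.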
private void generateForLoop(ForStatement forStatement) throws Exception {
Integer loopCount = this.loopCount++;
// Init
this.texts.add("__LOOP_INIT_" + loopCount + ":");
((ASTNode) forStatement.getForInit()).accept(this);
this.texts.add("__LOOP_CONDITION_" + loopCount + ":");
forStatement.getForCondition().accept(this);
this.texts.add("cmp eax, " + BOOLEAN_FALSE);
this.texts.add("je __LOOP_END_" + loopCount);
this.texts.add("__LOOP_STATEMENT_" + loopCount + ":");
forStatement.getForStatement().accept(this);
this.texts.add("__LOOP_UPDATE_" + loopCount + ":");
forStatement.getForUpdate().accept(this);
this.texts.add("jmp __LOOP_CONDITION_" + loopCount);
this.texts.add("__LOOP_END_" + loopCount + ":");
}
private void generateWhileStatement(WhileStatement whileStatement) throws ChildTypeUnmatchException, Exception {
Integer loopCount = this.loopCount++;
this.texts.add("__LOOP_CONDITION_" + loopCount + ":");
whileStatement.getWhileCondition().accept(this);
this.texts.add("cmp eax, " + BOOLEAN_FALSE);
this.texts.add("je __LOOP_END_" + loopCount);
this.texts.add("__LOOP_STATEMENT_" + loopCount + ":");
whileStatement.getWhileStatement().accept(this);
this.texts.add("jmp __LOOP_CONDITION_" + loopCount);
this.texts.add("__LOOP_END_" + loopCount + ":");
}
private void generateIfStatement(IfStatement ifStatement) throws ChildTypeUnmatchException, Exception {
Integer conditionCount = this.conditionCount++;
this.texts.add("__IF_CONDITION_" + conditionCount + ":");
ifStatement.getIfCondition().accept(this);
this.texts.add("cmp eax, " + BOOLEAN_FALSE);
this.texts.add("je __ELSE_STATEMENT_" + conditionCount);
this.texts.add("__IF_STATEMENT_" + conditionCount + ":");
ifStatement.getIfStatement().accept(this);
this.texts.add("jmp __IF_END_" + conditionCount);
this.texts.add("__ELSE_STATEMENT_" + conditionCount + ":");
if (ifStatement.getElseStatement() != null) {
ifStatement.getElseStatement().accept(this);
}
this.texts.add("__IF_END_" + conditionCount + ":");
}
private void generateFieldAccess(FieldAccess fieldAccess) throws Exception {
fieldAccess.getPrimary().accept(this);
this.referenceCurrentObject = false;
fieldAccess.getName().accept(this);
this.referenceCurrentObject = true;
}
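	// Each static field gets a global word in .data plus a *_INIT routine
	// that evaluates its initializer; the label is queued in statics so that
	// didVisit() can register it for Start_StaticInit.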
private void generateStaticFieldDeclaration(FieldDeclaration decl) throws Exception {
// TODO Move below
TableEntry entry = this.getCurrentScope().getParentTypeScope().getFieldDecl((FieldDeclaration) decl);
String label = null;
label = staticLabel(entry.getName());
this.texts.add("global " + label + "_INIT");
this.texts.add(label + "_INIT:");
this.data.add("global " + label);
this.data.add(label + ": dd 0x0");
// Place the node into a list...
statics.add(label);
Expression initialization = decl.getInitial();
if (initialization != null) {
this.dereferenceVariable = false;
decl.getName().accept(this);
this.dereferenceVariable = true;
this.texts.add("push eax\t\t\t; Push LHS to stack");
initialization.accept(this);
this.texts.add("pop ebx");
this.texts.add("mov [ebx], eax");
}
this.texts.add("ret");
}
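	// Emits staticInit.s. staticInit holds (extern label, call instruction)
	// pairs appended per file, so even indices become extern declarations
	// and odd indices become the calls inside Start_StaticInit.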
public void writeStaticInit() throws Exception {
this.texts = new ArrayList<String>();
this.externs = new HashSet<String>();
asmFile = new File("./output/staticInit.s");
this.asmFile.createNewFile();
this.texts.add("global Start_StaticInit\n");
this.texts.add("Start_StaticInit:\n");
for (int i = 0; i < staticInit.size(); i = i + 2) {
this.externs.add(staticInit.get(i));
this.texts.add(staticInit.get(i + 1));
}
BufferedWriter asmWriter = new BufferedWriter(new FileWriter(this.asmFile));
for (String line : this.externs) {
asmWriter.write("extern " + line);
}
asmWriter.newLine();
for (String line : this.texts) {
if (!line.startsWith("global") && !line.startsWith("section")) {
line = "\t" + line;
if (!line.endsWith(":")) {
line = "\t" + line;
}
}
asmWriter.write(line);
}
asmWriter.newLine();
asmWriter.write("ret\n");
asmWriter.close();
}
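	// Emits SubtypeTable as a flattened totalClassNum x totalClassNum matrix
	// of 0xffffffff/0x0 words; row i marks every supertype of the i-th
	// class. This presumably relies on the LinkedHashMap iteration order
	// matching each class's hierarchy table index.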
public void generateSubtypeTable() throws Exception {
LinkedHashMap<TypeDeclaration, Stack<TypeScope>> classHierachyChain = HierarchyChecker.getLinkedClassHierachyChain();
// System.out.println(HierarchyChecker.getLinkedClassHierachyChain());
asmFile = new File("./output/subtypeTable.s");
this.asmFile.createNewFile();
BufferedWriter asmWriter = new BufferedWriter(new FileWriter(this.asmFile));
this.texts = new ArrayList<String>();
this.texts.add("global SubtypeTable\n");
this.texts.add("SubtypeTable:\n");
int classNum = HierarchyChecker.getTotalClassNum();
boolean[] subtypeArr = new boolean[classNum * classNum];
int i = 0;
for (Entry<TypeDeclaration, Stack<TypeScope>> entry : classHierachyChain.entrySet()) {
// System.out.println(entry+"entry:");
// entry.
Stack<TypeScope> classScopes = entry.getValue();
while (!classScopes.empty()) {
TypeScope classScope = classScopes.pop();
if (classScope.getReferenceNode() instanceof TypeDeclaration) {
int index = ((TypeDeclaration) classScope.getReferenceNode()).getHierarchyTableIndex();
subtypeArr[i * classNum + index] = true;
}
}
i++;
}
for (i = 0; i < subtypeArr.length; i++) {
if (subtypeArr[i]) {
this.texts.add("dd 0xffffffff");
} else {
this.texts.add("dd 0x0");
}
}
for (String line : this.texts) {
asmWriter.write(line);
asmWriter.newLine();
}
asmWriter.close();
}
public void copyNullAsm() throws Exception {
File nullAsm = new File("resources/null.s");
File outputAsm = new File("output/null.s");
Files.copy(nullAsm, outputAsm);
}
}
| src/ca/uwaterloo/joos/codegen/CodeGenerator.java | package ca.uwaterloo.joos.codegen;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
import java.util.Stack;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.google.common.io.Files;
import ca.uwaterloo.joos.Main;
import ca.uwaterloo.joos.ast.ASTNode;
import ca.uwaterloo.joos.ast.ASTNode.ChildTypeUnmatchException;
import ca.uwaterloo.joos.ast.FileUnit;
import ca.uwaterloo.joos.ast.Modifiers;
import ca.uwaterloo.joos.ast.Modifiers.Modifier;
import ca.uwaterloo.joos.ast.decl.BodyDeclaration;
import ca.uwaterloo.joos.ast.decl.ClassDeclaration;
import ca.uwaterloo.joos.ast.decl.ConstructorDeclaration;
import ca.uwaterloo.joos.ast.decl.FieldDeclaration;
import ca.uwaterloo.joos.ast.decl.LocalVariableDeclaration;
import ca.uwaterloo.joos.ast.decl.MethodDeclaration;
import ca.uwaterloo.joos.ast.decl.OnDemandImport;
import ca.uwaterloo.joos.ast.decl.PackageDeclaration;
import ca.uwaterloo.joos.ast.decl.ParameterDeclaration;
import ca.uwaterloo.joos.ast.decl.SingleImport;
import ca.uwaterloo.joos.ast.decl.TypeDeclaration;
import ca.uwaterloo.joos.ast.decl.VariableDeclaration;
import ca.uwaterloo.joos.ast.expr.AssignmentExpression;
import ca.uwaterloo.joos.ast.expr.ClassCreateExpression;
import ca.uwaterloo.joos.ast.expr.Expression;
import ca.uwaterloo.joos.ast.expr.InfixExpression;
import ca.uwaterloo.joos.ast.expr.InfixExpression.InfixOperator;
import ca.uwaterloo.joos.ast.expr.MethodInvokeExpression;
import ca.uwaterloo.joos.ast.expr.UnaryExpression;
import ca.uwaterloo.joos.ast.expr.name.Name;
import ca.uwaterloo.joos.ast.expr.name.QualifiedName;
import ca.uwaterloo.joos.ast.expr.name.SimpleName;
import ca.uwaterloo.joos.ast.expr.primary.ArrayAccess;
import ca.uwaterloo.joos.ast.expr.primary.ArrayCreate;
import ca.uwaterloo.joos.ast.expr.primary.LiteralPrimary;
import ca.uwaterloo.joos.ast.expr.primary.LiteralPrimary.LiteralType;
import ca.uwaterloo.joos.ast.expr.primary.Primary;
import ca.uwaterloo.joos.ast.statement.Block;
import ca.uwaterloo.joos.ast.statement.ForStatement;
import ca.uwaterloo.joos.ast.statement.IfStatement;
import ca.uwaterloo.joos.ast.statement.ReturnStatement;
import ca.uwaterloo.joos.ast.statement.WhileStatement;
import ca.uwaterloo.joos.ast.type.ArrayType;
import ca.uwaterloo.joos.ast.type.PrimitiveType;
import ca.uwaterloo.joos.ast.type.PrimitiveType.Primitive;
import ca.uwaterloo.joos.ast.type.ReferenceType;
import ca.uwaterloo.joos.ast.type.Type;
import ca.uwaterloo.joos.checker.HierarchyChecker;
import ca.uwaterloo.joos.symboltable.BlockScope;
import ca.uwaterloo.joos.symboltable.Scope;
import ca.uwaterloo.joos.symboltable.SemanticsVisitor;
import ca.uwaterloo.joos.symboltable.SymbolTable;
import ca.uwaterloo.joos.symboltable.TableEntry;
import ca.uwaterloo.joos.symboltable.TypeScope;
public class CodeGenerator extends SemanticsVisitor {
public static final Logger logger = Main.getLogger(CodeGenerator.class);
protected static final String BOOLEAN_TRUE = "0xffffffff";
protected static final String BOOLEAN_FALSE = "0x0";
protected static final String NULL = "0x0";
protected File asmFile = null;
protected static File startFile = null;
protected Set<String> externs = null;
protected List<String> texts = null;
protected List<String> data = null;
protected List<String> statics = null;
protected static List<String> staticInit = new ArrayList<String>();
private String methodLabel = null;
private Integer literalCount = 0;
private Integer comparisonCount = 0;
private Integer loopCount = 0;
private Integer conditionCount = 0;
private Boolean dereferenceVariable = true;
private Set<Class<?>> complexNodes = null;
public CodeGenerator(SymbolTable table) {
super(table);
logger.setLevel(Level.FINER);
this.complexNodes = new HashSet<Class<?>>();
this.complexNodes.add(ReferenceType.class);
this.complexNodes.add(PackageDeclaration.class);
this.complexNodes.add(SingleImport.class);
this.complexNodes.add(OnDemandImport.class);
}
private void initialize() {
this.asmFile = null;
this.statics = new ArrayList<String>();
this.externs = new HashSet<String>();
this.texts = new ArrayList<String>();
this.data = new ArrayList<String>();
this.methodLabel = null;
this.literalCount = 0;
this.comparisonCount = 0;
this.loopCount = 0;
this.dereferenceVariable = true;
// Place the runtime.s externs
this.externs.add("__malloc");
this.externs.add("__debexit");
this.externs.add("__exception");
this.externs.add("NATIVEjava.io.OutputStream.nativeWrite");
this.externs.add("SubtypeTable");
this.texts.add("");
this.texts.add("section .text");
this.texts.add("");
this.data.add("");
this.data.add("section .data");
this.data.add("");
}
private void addExtern(String label, String declarationName) {
if (!this.getCurrentScope().getParentTypeScope().getSymbols().containsKey(declarationName)) {
logger.fine("Adding extern " + declarationName + " within scope " + this.getCurrentScope().getParentTypeScope());
this.externs.add(label);
}
}
private void addVtable(String fullyQualifiedTypeName) {
if (!this.getCurrentScope().getParentTypeScope().getName().equals(fullyQualifiedTypeName)) {
this.externs.add(fullyQualifiedTypeName + "_VTABLE");
}
}
@Override
public void willVisit(ASTNode node) throws Exception {
super.willVisit(node);
if (node instanceof FileUnit) {
this.initialize();
} else if (node instanceof TypeDeclaration) {
// Construct output file
// String filename = this.getCurrentScope().getName();
String filename = node.getIdentifier();
// filename = filename.replace('.', '/');
filename = "./output/" + filename + ".s";
logger.finer(filename);
this.asmFile = new File(filename);
} else if (node instanceof MethodDeclaration) {
Modifiers modifiers = ((MethodDeclaration) node).getModifiers();
if (!modifiers.containModifier(Modifier.NATIVE) && !modifiers.containModifier(Modifier.ABSTRACT)) {
// Define method labels
this.methodLabel = methodLabel(this.getCurrentScope().getName());
if (((MethodDeclaration) node).getName().getSimpleName().equals("test") && modifiers.containModifier(Modifier.STATIC)) {
this.methodLabel = "_start";
startFile = this.asmFile;
}
this.texts.add("global " + this.methodLabel);
this.texts.add(this.methodLabel + ":");
// Preamble
this.texts.add("push ebp\t\t\t; Preamble");
this.texts.add("mov ebp, esp");
// Allocate space for local variables
this.texts.add("sub esp, " + (((MethodDeclaration) node).totalLocalVariables * 4));
// Push registers
// this.texts.add("push eax"); // Leave eax as return value
this.texts.add("push ebx");
this.texts.add("push ecx");
this.texts.add("push edx");
this.texts.add("");
if (((MethodDeclaration) node).getName().getSimpleName().equals("test") && modifiers.containModifier(Modifier.STATIC)) {
this.texts.add("call Start_StaticInit");
}
if (node instanceof ConstructorDeclaration) {
// Call any superclass constructor
// TODO call super constructor...
// Get the class holding the constructor
ClassDeclaration cd = (ClassDeclaration) this.getCurrentScope().getParentTypeScope().getReferenceNode();
List<FieldDeclaration> fds = cd.getBody().getFields();
this.texts.add("push eax\t\t\t; Push the new object address");
this.texts.add("mov ebx, [ebp + 8]\t\t; Current Object");
this.texts.add("add ebx, 4\t\t\t; First space reserved");
// Initialize field variables here
for (FieldDeclaration fd : fds) {
if (fd.getInitial() != null) {
this.texts.add("push ebx\t\t\t; Push address of field");
fd.getInitial().accept(this);
this.texts.add("pop ebx\t\t\t; Pop LHS");
this.texts.add("mov [ebx], eax");
}
this.texts.add("add ebx, 4\t\t\t; ");
}
}
}
}
}
@Override
public boolean visit(ASTNode node) throws Exception {
logger.finest("Visiting " + node);
if (node instanceof MethodInvokeExpression) {
this.generateMethodInvoke((MethodInvokeExpression) node);
return false;
} else if (node instanceof ClassCreateExpression) {
this.generateClassCreate((ClassCreateExpression) node);
return false;
} else if (node instanceof InfixExpression) {
this.generateInfixExpression((InfixExpression) node);
return false;
} else if (node instanceof ArrayCreate) {
this.generateArrayCreate((ArrayCreate) node);
return false;
} else if (node instanceof ArrayAccess) {
this.generateArrayAccess((ArrayAccess) node);
return false;
} else if (node instanceof LiteralPrimary) {
this.generateLiteral((LiteralPrimary) node);
return false;
} else if (node instanceof UnaryExpression) {
this.generateUnaryExpression((UnaryExpression) node);
return false;
} else if (node instanceof LocalVariableDeclaration) {
this.generateLocalVariableDeclaration((LocalVariableDeclaration) node);
return false;
} else if (node instanceof AssignmentExpression) {
this.generateAssignmentExpression((AssignmentExpression) node);
return false;
} else if (node instanceof ForStatement) {
this.generateForLoop((ForStatement) node);
return false;
} else if (node instanceof WhileStatement) {
this.generateWhileStatement((WhileStatement) node);
return false;
} else if (node instanceof IfStatement) {
this.generateIfStatement((IfStatement) node);
return false;
} else if (node instanceof FieldDeclaration) {
if (((FieldDeclaration) node).getModifiers().containModifier(Modifier.STATIC)) {
this.generateStaticFieldDeclaration((FieldDeclaration) node);
}
return false;
} else if (node instanceof MethodDeclaration) {
Block body = ((MethodDeclaration) node).getBody();
if (body != null) {
body.accept(this);
}
return false;
} else if (node instanceof Name) {
this.generateVariableAccess((Name) node);
return false;
}
return !this.complexNodes.contains(node.getClass());
}
@Override
public void didVisit(ASTNode node) throws Exception {
if (node instanceof FileUnit) {
// File content generated, write to file
File dir = this.asmFile.getParentFile();
for (String label : this.statics) {
staticInit.add("\t" + label + "_INIT\n");
staticInit.add("\tcall " + label + "_INIT" + '\n');
}
if (dir != null) {
dir.mkdirs();
}
this.asmFile.createNewFile();
BufferedWriter asmWriter = new BufferedWriter(new FileWriter(this.asmFile));
for (String line : this.externs) {
asmWriter.write("extern " + line);
asmWriter.newLine();
}
for (String line : this.texts) {
if (!line.startsWith("global") && !line.startsWith("section")) {
line = "\t" + line;
if (!line.endsWith(":")) {
line = "\t" + line;
}
}
asmWriter.write(line);
asmWriter.newLine();
}
for (String line : this.data) {
asmWriter.write(line);
asmWriter.newLine();
}
asmWriter.close();
} else if (node instanceof TypeDeclaration) {
this.texts.add("global " + this.getCurrentScope().getName() + "_VTABLE");
this.texts.add(this.getCurrentScope().getName() + "_VTABLE:");
this.texts.add("dd " + ((TypeDeclaration) node).getHierarchyTableIndex());
for (Entry<Integer, Scope> entry : ((TypeDeclaration) node).getSignatures().entrySet()) {
Scope methodScope = entry.getValue();
if (!this.getCurrentScope().getSymbols().containsKey(methodScope.getName())) {
this.externs.add(methodLabel(methodScope.getName()));
}
if (((MethodDeclaration) methodScope.getReferenceNode()).getModifiers().containModifier(Modifier.STATIC) && ((MethodDeclaration) methodScope.getReferenceNode()).getName().getName().equals("test")) {
this.externs.add("Start_StaticInit");
this.texts.add("dd _start");
startFile = asmFile;
} else
this.texts.add("dd " + methodLabel(methodScope.getName()));
}
this.texts.add("");
} else if (node instanceof MethodDeclaration) {
Modifiers modifiers = ((MethodDeclaration) node).getModifiers();
if (node instanceof ConstructorDeclaration) {
this.texts.add("pop eax\t\t\t; Restore THIS pointer to eax");
}
if (!modifiers.containModifier(Modifier.NATIVE) && !modifiers.containModifier(Modifier.ABSTRACT)) {
// Postamble
this.texts.add(this.methodLabel + "_END:");
// Pop registers
this.texts.add("pop edx\t\t\t\t; Postamble");
this.texts.add("pop ecx");
this.texts.add("pop ebx");
// this.texts.add("pop eax"); // Leave eax as return value
// Deallocate space for local variables
this.texts.add("add esp, " + (((MethodDeclaration) node).totalLocalVariables * 4));
// Restore frame pointer
this.texts.add("pop ebp");
if (this.methodLabel.equals("_start")) {
this.texts.add("call __debexit");
} else {
this.texts.add("ret");
}
this.texts.add("");
}
} else if (node instanceof ReturnStatement) {
this.texts.add("jmp " + this.methodLabel + "_END");
}
super.didVisit(node);
}
private static String methodLabel(String methodSignature) {
String label = methodSignature.replaceAll("[(),]", "_");
label = label.replaceAll("\\[\\]", "_ARRAY");
return label;
}
private static String staticLabel(String fieldName) {
String label = "STATIC" + fieldName;
return label;
}
private void generateVariableDereference(TableEntry entry) throws Exception {
VariableDeclaration varDecl = (VariableDeclaration) entry.getNode();
if (varDecl instanceof ParameterDeclaration) {
this.texts.add("mov eax, [ebp + " + (4 + varDecl.getIndex() * 4) + "]\t; Accessing parameter: " + entry.getName());
} else if (varDecl instanceof FieldDeclaration) {
if (varDecl.getModifiers().containModifier(Modifier.STATIC)) {
String label = staticLabel(entry.getName());
this.addExtern(label, entry.getName());
this.texts.add("mov eax, [" + label + "]\t; Accessing static: " + entry.getName());
} else {
this.texts.add("mov eax, [eax + " + (varDecl.getIndex() * 4) + "]\t; Accessing field: " + entry.getName());
}
} else if (varDecl instanceof LocalVariableDeclaration) {
this.texts.add("mov eax, [ebp - " + (varDecl.getIndex() * 4) + "]\t; Accessing local: " + entry.getName());
}
}
private void generateVariableAddress(TableEntry entry) throws Exception {
VariableDeclaration varDecl = (VariableDeclaration) entry.getNode();
if (varDecl instanceof ParameterDeclaration) {
this.texts.add("mov eax, ebp");
this.texts.add("add eax, " + (4 + varDecl.getIndex() * 4) + "\t\t\t; Address of parameter: " + entry.getName());
} else if (varDecl instanceof FieldDeclaration) {
if (varDecl.getModifiers().containModifier(Modifier.STATIC)) {
String label = staticLabel(entry.getName());
this.addExtern(label, entry.getName());
this.texts.add("mov eax, " + label + "\t; Address of static: " + entry.getName());
} else {
this.texts.add("add eax, " + (varDecl.getIndex() * 4) + "\t\t\t; Address of field: " + entry.getName());
}
} else if (varDecl instanceof LocalVariableDeclaration) {
this.texts.add("mov eax, ebp");
this.texts.add("sub eax, " + (varDecl.getIndex() * 4) + "\t\t\t; Address of local: " + entry.getName());
}
}
private void generateVariableAccess(Name name) throws Exception {
int i = 0;
if (name instanceof SimpleName) {
TableEntry entry = ((SimpleName) name).getOriginalDeclaration();
if (entry == null) {
String field = ((SimpleName) name).getName();
if (field.equals("length")) {
this.texts.add("mov eax, [eax]\t\t; Fetch array length");
} else {
throw new Exception("Unknown field " + field);
}
} else if (this.dereferenceVariable) {
this.texts.add("mov eax, [ebp + 8]\t; Current object");
this.generateVariableDereference(entry);
} else {
this.texts.add("mov eax, [ebp + 8]\t; Current object");
this.generateVariableAddress(entry);
}
} else if (name instanceof QualifiedName) {
this.texts.add("mov eax, [ebp + 8]\t; Current object");
TableEntry entry = ((QualifiedName) name).getOriginalDeclaration();
if (entry.getNode() instanceof VariableDeclaration) {
this.generateVariableDereference(entry);
}
List<TableEntry> originalDeclarations = ((QualifiedName) name).originalDeclarations;
for (i = 0; i < originalDeclarations.size(); i++) {
entry = originalDeclarations.get(i);
if (i != originalDeclarations.size() - 1 || this.dereferenceVariable) {
this.generateVariableDereference(entry);
} else {
this.generateVariableAddress(entry);
}
}
List<String> components = ((QualifiedName) name).getComponents();
if (components.size() - originalDeclarations.size() > 1) {
String field = components.get(components.size() - 1);
if (field.equals("length")) {
this.texts.add("mov eax, [eax]\t\t; Fetch array size");
} else {
throw new Exception("Unknown field " + field);
}
}
}
}
private void generateMethodInvoke(MethodInvokeExpression methodInvoke) throws Exception {
// Push parameters to stack
List<Expression> args = methodInvoke.getArguments();
int i = args.size();
for (i--; i >= 0; i--) {
Expression arg = args.get(i);
// Generate code for arg
arg.accept(this);
this.texts.add("push eax\t\t\t; Push parameter #" + (i + 1) + " to stack");
}
String methodName = methodInvoke.fullyQualifiedName;
String methodLabel = methodLabel(methodName);
if (methodLabel.equals("java.io.OutputStream.nativeWrite_INT__")) {
// Calling native write
methodLabel = "NATIVEjava.io.OutputStream.nativeWrite";
this.texts.add("pop eax\t\t\t\t; Pop parameter for native write");
this.texts.add("push ebx");
this.texts.add("push ecx");
this.texts.add("push edx");
this.texts.add("call " + methodLabel);
this.texts.add("pop edx");
this.texts.add("pop ecx");
this.texts.add("pop ebx");
this.texts.add("");
return;
}
// Push THIS to stack, THIS should be the address of the object
Primary primary = methodInvoke.getPrimary();
Name name = methodInvoke.getName();
if (primary != null) {
			// If primary is not null, the method is being invoked on a primary expression
primary.accept(this);
} else if (name instanceof QualifiedName) {
logger.finest("Generating method invoke for name " + name + " with #" + ((QualifiedName) name).originalDeclarations.size() + " entries");
this.texts.add("mov eax, [ebp + 8]\t; Current object");
List<TableEntry> originalDeclarations = ((QualifiedName) name).originalDeclarations;
for (TableEntry entry : originalDeclarations) {
this.generateVariableDereference(entry);
}
} else if (name instanceof SimpleName) {
// Invoking method within same Type, THIS is parameter #0
logger.finest("Generating method invoke for simple name " + name);
this.texts.add("mov eax, [ebp + 8]\t; Current object");
}
this.texts.add("push eax\t\t\t; Push THIS as parameter #0");
// Invoke the method
BlockScope methodBlock = this.table.getBlock(methodInvoke.fullyQualifiedName);
BodyDeclaration methodNode = (BodyDeclaration) methodBlock.getReferenceNode();
if (methodNode.getModifiers().containModifier(Modifier.STATIC)) {
this.texts.add("mov edx, " + methodBlock.getParentTypeScope().getName() + "_VTABLE");
} else {
this.texts.add("mov edx, [eax]\t; Dereference for the address of VTable");
}
this.texts.add("call [edx + " + (methodNode.getIndex() * 4 + 4) + "]\t; Call " + methodInvoke.fullyQualifiedName);
// Pop THIS from stack
this.texts.add("pop edx\t\t\t\t; Pop THIS");
// Pop parameters from stack
for (i = 0; i < args.size(); i++) {
this.texts.add("pop edx\t\t\t\t; Pop parameter #" + (i + 1) + " from stack");
}
// Add the vtable containing the method label
this.addVtable(methodBlock.getParentTypeScope().getName());
this.texts.add("");
}
private void generateClassCreate(ClassCreateExpression classCreate) throws Exception {
// Push parameters to stack
List<Expression> args = classCreate.getArguments();
int i = args.size();
for (i--; i >= 0; i--) {
Expression arg = args.get(i);
// Generate code for arg
arg.accept(this);
this.texts.add("push eax\t\t\t; Push parameter #" + i + " to stack");
}
// Allocate space for the new object
TypeScope typeScope = this.table.getType(classCreate.getType().getFullyQualifiedName());
TypeDeclaration typeDecl = (TypeDeclaration) typeScope.getReferenceNode();
this.texts.add("mov eax, " + (4 + typeDecl.totalFieldDeclarations * 4) + "\t\t\t; Size of the object");
this.texts.add("call __malloc");
this.texts.add("push eax\t\t\t; Push new object pointer as THIS");
this.addVtable(typeScope.getName());
this.texts.add("mov ebx, " + typeScope.getName() + "_VTABLE");
this.texts.add("mov [eax], ebx");
// Invoke the constructor
String constructorName = classCreate.fullyQualifiedName;
String constructorLabel = methodLabel(constructorName);
this.texts.add("call " + constructorLabel);
// Pop THIS from stack
this.texts.add("pop edx\t\t\t\t; Pop THIS");
// Pop parameters from stack
for (i = 0; i < args.size(); i++) {
this.texts.add("pop edx\t\t\t\t; Pop parameters #" + i + " from stack");
}
// Add to extern if is not local method
if (!this.getCurrentScope().getParentTypeScope().getSymbols().containsKey(constructorName)) {
this.externs.add(constructorLabel);
}
this.texts.add("");
}
private void generateValueToString(Type exprType) throws Exception {
if (exprType instanceof PrimitiveType) {
String valueOfLabel = "java.lang.String.valueOf_" + exprType.getFullyQualifiedName() + "__";
this.texts.add("push eax\t\t\t; Push the primitive as perameter #1");
this.texts.add("push eax\t\t\t; Push something as a fake THIS");
this.texts.add("call " + valueOfLabel);
this.addExtern(valueOfLabel, "java.lang.String.valueOf(" + exprType.getFullyQualifiedName() + ",)");
this.texts.add("pop edx");
this.texts.add("pop edx");
} else if (exprType instanceof ReferenceType) {
this.texts.add("push eax\t\t\t; Push the reference variable address as THIS");
this.texts.add("mov eax, [eax]\t; Obtain VTable address");
this.texts.add("add eax, 12\t\t; Shift to the index of toString");
this.texts.add("mov eax, [eax]\t; Dereference address of toString");
this.texts.add("call eax");
this.texts.add("pop edx");
}
}
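	// String '+' lowering: expects op1 in EAX and op2 in EDX, stringifies any
	// operand that is not already a java.lang.String, then invokes
	// java.lang.String.concat(java.lang.String).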
private void generateStringConcat(Expression op1, Expression op2) throws Exception {
Type op1Type = op1.exprType;
Type op2Type = op2.exprType;
// op1 at EAX, op2 at EDX
if (!op1Type.getFullyQualifiedName().equals("java.lang.String")) {
this.texts.add("push edx\t\t\t; Push op2 to stack first");
this.generateValueToString(op1Type);
this.texts.add("pop edx\t\t\t\t; Pop op2");
}
// op1 String on stack, op2 at EDX
if (!op2Type.getFullyQualifiedName().equals("java.lang.String")) {
this.texts.add("push eax\t\t\t; Push op1 String to stack");
this.texts.add("mov eax, edx");
this.generateValueToString(op2Type);
this.texts.add("mov edx, eax");
this.texts.add("pop eax\t\t\t\t; Pop op1 String");
}
// op1 String at EAX, op2 String at EDX. Invoke
// java.lang.String.concat(java.lang.String) now.
this.texts.add("push edx");
this.texts.add("push eax");
this.texts.add("call java.lang.String.concat_java.lang.String__");
this.addExtern("java.lang.String.concat_java.lang.String__", "java.lang.String.concat(java.lang.String,)");
this.texts.add("pop edx");
this.texts.add("pop edx");
}
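	// Emits an infix expression, leaving the result in EAX. AND/OR short-circuit,
	// comparisons materialize BOOLEAN_TRUE/BOOLEAN_FALSE via labeled jumps, and
	// INSTANCEOF is answered by a lookup in the generated subtype table.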
private void generateInfixExpression(InfixExpression infixExpr) throws Exception {
InfixOperator operator = infixExpr.getOperator();
List<Expression> operands = infixExpr.getOperands();
int comparisonCount = this.comparisonCount;
this.comparisonCount++;
// Instance of
if (operator.equals(InfixOperator.INSTANCEOF)) {
Expression operand = operands.get(0);
Type rhsType = infixExpr.getRHS();
operand.accept(this);
this.texts.add("mov eax,[eax]\t\t\t; get Vtable of current object");
this.texts.add("mov eax,[eax]\t\t\t; get the index of current object");
TypeScope typeScope = this.table.getType(rhsType.getFullyQualifiedName());
TypeDeclaration typeNode = (TypeDeclaration) typeScope.getReferenceNode();
this.texts.add("mov ebx," + typeNode.getHierarchyTableIndex() + "\t; get the index of RHS of instanceof");
this.texts.add("mov edx," + HierarchyChecker.getTotalClassNum() + "\t; get the fixed shift");
this.texts.add("mov ecx, SubtypeTable\t; get the subtypeTable");
this.texts.add("imul eax, edx\t; Multiply row with row width");
this.texts.add("add eax, ecx");
this.texts.add("add eax, ebx");
this.texts.add("mov eax, [eax]\t; get the subtype flag value");
return;
}
if (operator.equals(InfixOperator.AND) || operator.equals(InfixOperator.OR)) {
operands.get(0).accept(this);
} else {
// Generate code for the second operand and push to the stack
operands.get(1).accept(this);
this.texts.add("push eax\t\t\t; Push second operand value");
// Generate code for the first operand and result stay in eax
operands.get(0).accept(this);
this.texts.add("pop edx\t\t\t\t; Pop second operand value to edx");
}
switch (operator) {
case AND:
this.texts.add("cmp eax, " + BOOLEAN_FALSE);
this.texts.add("je " + "__COMPARISON_FALSE_" + comparisonCount + "\t; If Lazy AND reach false, skip second operands");
operands.get(1).accept(this);
this.texts.add("__COMPARISON_FALSE_" + comparisonCount + ":");
break;
case BAND:
this.texts.add("and eax, edx");
break;
case BOR:
this.texts.add("or eax, edx");
break;
case EQ:
this.texts.add("cmp eax, edx");
this.texts.add("je " + "__COMPARISON_TRUE_" + comparisonCount);
this.texts.add("mov eax, " + BOOLEAN_FALSE);
this.texts.add("jmp " + "__COMPARISON_FALSE_" + comparisonCount);
this.texts.add("__COMPARISON_TRUE_" + comparisonCount + ":");
this.texts.add("mov eax, " + BOOLEAN_TRUE);
this.texts.add("__COMPARISON_FALSE_" + comparisonCount + ":");
break;
case GEQ:
this.texts.add("cmp eax, edx");
this.texts.add("jge " + "__COMPARISON_TRUE_" + comparisonCount);
this.texts.add("mov eax, " + BOOLEAN_FALSE);
this.texts.add("jmp " + "__COMPARISON_FALSE_" + comparisonCount);
this.texts.add("__COMPARISON_TRUE_" + comparisonCount + ":");
this.texts.add("mov eax, " + BOOLEAN_TRUE);
this.texts.add("__COMPARISON_FALSE_" + comparisonCount + ":");
break;
case GT:
this.texts.add("cmp eax, edx");
this.texts.add("jg " + "__COMPARISON_TRUE_" + comparisonCount);
this.texts.add("mov eax, " + BOOLEAN_FALSE);
this.texts.add("jmp " + "__COMPARISON_FALSE_" + comparisonCount);
this.texts.add("__COMPARISON_TRUE_" + comparisonCount + ":");
this.texts.add("mov eax, " + BOOLEAN_TRUE);
this.texts.add("__COMPARISON_FALSE_" + comparisonCount + ":");
break;
case LEQ:
this.texts.add("cmp eax, edx");
this.texts.add("jle " + "__COMPARISON_TRUE_" + comparisonCount);
this.texts.add("mov eax, " + BOOLEAN_FALSE);
this.texts.add("jmp " + "__COMPARISON_FALSE_" + comparisonCount);
this.texts.add("__COMPARISON_TRUE_" + comparisonCount + ":");
this.texts.add("mov eax, " + BOOLEAN_TRUE);
this.texts.add("__COMPARISON_FALSE_" + comparisonCount + ":");
break;
case LT:
this.texts.add("cmp eax, edx");
this.texts.add("jl " + "__COMPARISON_TRUE_" + comparisonCount);
this.texts.add("mov eax, " + BOOLEAN_FALSE);
this.texts.add("jmp " + "__COMPARISON_FALSE_" + comparisonCount);
this.texts.add("__COMPARISON_TRUE_" + comparisonCount + ":");
this.texts.add("mov eax, " + BOOLEAN_TRUE);
this.texts.add("__COMPARISON_FALSE_" + comparisonCount + ":");
break;
case MINUS:
// eax = first operand - second operand
this.texts.add("sub eax, edx");
break;
case NEQ:
this.texts.add("cmp eax, edx");
this.texts.add("jne " + "__COMPARISON_TRUE_" + comparisonCount);
this.texts.add("mov eax, " + BOOLEAN_FALSE);
this.texts.add("jmp " + "__COMPARISON_FALSE_" + comparisonCount);
this.texts.add("__COMPARISON_TRUE_" + comparisonCount + ":");
this.texts.add("mov eax, " + BOOLEAN_TRUE);
this.texts.add("__COMPARISON_FALSE_" + comparisonCount + ":");
break;
case OR:
this.texts.add("cmp eax, " + BOOLEAN_FALSE);
this.texts.add("jne " + "__COMPARISON_TRUE_" + comparisonCount + "\t; If Lazy OR reach true, skip second operands");
operands.get(1).accept(this);
this.texts.add("__COMPARISON_TRUE_" + comparisonCount + ":");
break;
case PERCENT:
// eax = first operand % second operand
this.texts.add("cmp edx, 0\t\t\t; Check zero divider");
this.texts.add("je __exception\t\t; Throw exception");
this.texts.add("mov ebx, 0");
this.texts.add("xchg edx, ebx\t\t; Set edx to 0, and ebx to be the divider");
this.texts.add("idiv ebx\t\t\t; Divide edx:eax with ebx");
this.texts.add("mov eax, edx\t\t; Move the remainder to eax");
break;
case PLUS:
// eax = first operand + second operand
if (operands.get(0).exprType.getFullyQualifiedName().equals("java.lang.String") || operands.get(1).exprType.getFullyQualifiedName().equals("java.lang.String")) {
this.generateStringConcat(operands.get(0), operands.get(1));
} else {
this.texts.add("add eax, edx");
}
break;
case SLASH:
// eax = first operand / second operand
this.texts.add("cmp edx, 0\t\t\t; Check zero divider");
this.texts.add("je __exception\t\t; Throw exception");
this.texts.add("mov ebx, 0");
this.texts.add("xchg edx, ebx\t\t; Set edx to 0, and ebx to be the divider");
this.texts.add("idiv ebx\t\t\t; Divide edx:eax with ebx, quotient will be in eax");
break;
case STAR:
// eax = first operand * second operand
this.texts.add("imul eax, edx");
break;
default:
throw new Exception("Unkown infix operator type " + operator);
}
this.texts.add("");
}
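	// Allocates an array: the dimension is scaled to bytes (4 per element, except
	// char arrays, which stay byte-sized) plus a 4-byte header holding the array
	// length at offset 0; the array address ends up in EAX.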
private void generateArrayCreate(ArrayCreate arrayCreate) throws Exception {
Type arrayElementType = arrayCreate.getType().getType();
arrayCreate.getDimension().accept(this);
this.texts.add("push eax\t\t\t; Push array dimension to stack");
if (!(arrayElementType instanceof PrimitiveType && ((PrimitiveType) arrayElementType).getPrimitive().equals(Primitive.CHAR))) {
this.texts.add("imul eax, 4\t\t\t; Multiply the array dimension by byte size");
}
this.texts.add("add eax, 4\t\t\t; Extra space to store array length");
this.texts.add("call __malloc\t\t; Malloc space for the array");
this.texts.add("pop ebx\t\t\t\t; Pop array dimension");
this.texts.add("mov [eax], ebx\t\t; Save array dimension to the beginning of array");
}
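	// Array element access: computes base + 4 (length header) + scaled index into
	// EDX, then leaves either the element's value or its address in EAX,
	// depending on this.dereferenceVariable.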
private void generateArrayAccess(ArrayAccess arrayAccess) throws Exception {
boolean originDereferenceSetting = this.dereferenceVariable;
this.dereferenceVariable = true;
arrayAccess.getExpression().accept(this);
this.texts.add("push eax\t\t\t; Push array address to stack first");
arrayAccess.getIndex().accept(this);
this.texts.add("pop edx");
this.texts.add("add edx, 4\t\t\t; Shift for array length");
Type arrayElementType = ((ArrayType) arrayAccess.exprType).getType();
if (!(arrayElementType instanceof PrimitiveType && ((PrimitiveType) arrayElementType).getPrimitive().equals(Primitive.CHAR))) {
this.texts.add("imul eax, 4\t\t\t; Multiply the array index by byte size");
}
this.texts.add("add edx, eax\t\t; Shift to index eax");
this.dereferenceVariable = originDereferenceSetting;
if (this.dereferenceVariable) {
this.texts.add("mov eax, [edx]\t\t; Dereference the array element");
} else {
this.texts.add("mov eax, edx\t\t; Address of the array element");
}
}
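	// Loads a literal into EAX; char literals handle escape sequences, and each
	// string literal emits a static java.lang.String object (vtable pointer,
	// length, character data) into the data section.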
private void generateLiteral(LiteralPrimary literal) throws Exception {
char c = '\0';
switch (literal.getLiteralType()) {
case BOOLLIT:
if (literal.getValue().equals("true")) {
this.texts.add("mov eax, " + BOOLEAN_TRUE);
} else {
this.texts.add("mov eax, " + BOOLEAN_FALSE);
}
break;
case CHARLIT:
c = literal.getValue().charAt(1);
if (c == '\\' && literal.getValue().length() > 3) {
c = literal.getValue().charAt(2);
if (c == 'b')
c = '\b';
else if (c == 't')
c = '\t';
else if (c == 'n')
c = '\n';
else if (c == 'f')
c = '\f';
else if (c == 'r')
c = '\r';
else if (c == '"')
c = '"';
else if (c == '\'')
c = '\'';
else if (c == '\\')
c = '\\';
else
c = '\0';
}
this.texts.add("mov eax, " + ((int) c));
break;
case INTLIT:
			// Assuming the int literal is within integer range
this.texts.add("mov eax, " + Integer.valueOf(literal.getValue()));
break;
case NULL:
this.texts.add("mov eax, __NULL_LIT_");
this.addExtern("__NULL_LIT_", "");
break;
case STRINGLIT:
this.addVtable("java.lang.String");
this.data.add("__STRING_" + this.literalCount + " dd java.lang.String_VTABLE");
this.data.add("dd " + "__STRING_LIT_" + this.literalCount);
this.data.add("__STRING_LIT_" + this.literalCount + " dd " + (literal.getValue().length() - 2));
this.data.add("dd " + literal.getValue());
this.data.add("align 4");
this.texts.add("mov eax, " + "__STRING_" + this.literalCount);
this.literalCount++;
break;
default:
break;
}
}
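	// Unary operators: minus is folded directly into int/char literals where
	// possible; otherwise the operand is evaluated and EAX is negated or
	// bitwise-inverted.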
private void generateUnaryExpression(UnaryExpression unaryExpr) throws Exception {
Expression operand = unaryExpr.getOperand();
switch (unaryExpr.getOperator()) {
case MINUS:
if (operand instanceof LiteralPrimary) {
				// Assuming the literal is an int or char literal
if (((LiteralPrimary) operand).getLiteralType().equals(LiteralType.INTLIT)) {
this.texts.add("mov eax, " + Integer.parseInt("-" + ((LiteralPrimary) operand).getValue()));
}
if (((LiteralPrimary) operand).getLiteralType().equals(LiteralType.CHARLIT)) {
int charNumber = (int) ((LiteralPrimary) operand).getValue().charAt(1);
this.texts.add("mov eax, " + Integer.parseInt("-" + charNumber));
}
} else {
operand.accept(this);
this.texts.add("neg eax");
}
break;
case NOT:
operand.accept(this);
this.texts.add("not eax");
break;
default:
break;
}
}
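	// A local variable declaration with an initializer is lowered like an
	// assignment: take the variable's address, evaluate the initializer, then
	// store EAX through the saved address.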
private void generateLocalVariableDeclaration(LocalVariableDeclaration decl) throws Exception {
Expression initialization = decl.getInitial();
if (initialization != null) {
this.dereferenceVariable = false;
decl.getName().accept(this);
this.dereferenceVariable = true;
this.texts.add("push eax\t\t\t; Push LHS to stack");
initialization.accept(this);
this.texts.add("pop ebx");
this.texts.add("mov [ebx], eax");
this.texts.add("");
}
}
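	// Assignment: evaluate the LHS as an address (dereferencing disabled), push
	// it, evaluate the RHS into EAX, then store through the popped address.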
private void generateAssignmentExpression(AssignmentExpression assignExpr) throws Exception {
this.dereferenceVariable = false;
((ASTNode) assignExpr.getLeftHand()).accept(this);
this.dereferenceVariable = true;
this.texts.add("push eax\t\t\t; Push LHS to stack");
assignExpr.getExpression().accept(this);
this.texts.add("pop ebx");
// TODO: Assignment to char array should use `al`
this.texts.add("mov [ebx], eax");
this.texts.add("");
}
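	// For loop lowering: INIT, CONDITION (exits to END when false), STATEMENT,
	// UPDATE, then an unconditional jump back to CONDITION.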
private void generateForLoop(ForStatement forStatement) throws Exception {
Integer loopCount = this.loopCount++;
// Init
this.texts.add("__LOOP_INIT_" + loopCount + ":");
((ASTNode) forStatement.getForInit()).accept(this);
this.texts.add("__LOOP_CONDITION_" + loopCount + ":");
forStatement.getForCondition().accept(this);
this.texts.add("cmp eax, " + BOOLEAN_FALSE);
this.texts.add("je __LOOP_END_" + loopCount);
this.texts.add("__LOOP_STATEMENT_" + loopCount + ":");
forStatement.getForStatement().accept(this);
this.texts.add("__LOOP_UPDATE_" + loopCount + ":");
forStatement.getForUpdate().accept(this);
this.texts.add("jmp __LOOP_CONDITION_" + loopCount);
this.texts.add("__LOOP_END_" + loopCount + ":");
}
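	// While loop lowering: test the condition, exit to END when false, run the
	// body, then jump back to the condition check.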
private void generateWhileStatement(WhileStatement whileStatement) throws ChildTypeUnmatchException, Exception {
Integer loopCount = this.loopCount++;
this.texts.add("__LOOP_CONDITION_" + loopCount + ":");
whileStatement.getWhileCondition().accept(this);
this.texts.add("cmp eax, " + BOOLEAN_FALSE);
this.texts.add("je __LOOP_END_" + loopCount);
this.texts.add("__LOOP_STATEMENT_" + loopCount + ":");
whileStatement.getWhileStatement().accept(this);
this.texts.add("jmp __LOOP_CONDITION_" + loopCount);
this.texts.add("__LOOP_END_" + loopCount + ":");
}
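	// If/else lowering: evaluate the condition, jump to the else branch when
	// false, and jump over the else branch to IF_END after the then branch.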
private void generateIfStatement(IfStatement ifStatement) throws ChildTypeUnmatchException, Exception {
Integer conditionCount = this.conditionCount++;
this.texts.add("__IF_CONDITION_" + conditionCount + ":");
ifStatement.getIfCondition().accept(this);
this.texts.add("cmp eax, " + BOOLEAN_FALSE);
this.texts.add("je __ELSE_STATEMENT_" + conditionCount);
this.texts.add("__IF_STATEMENT_" + conditionCount + ":");
ifStatement.getIfStatement().accept(this);
this.texts.add("jmp __IF_END_" + conditionCount);
this.texts.add("__ELSE_STATEMENT_" + conditionCount + ":");
if (ifStatement.getElseStatement() != null) {
ifStatement.getElseStatement().accept(this);
}
this.texts.add("__IF_END_" + conditionCount + ":");
}
private void generateStaticFieldDeclaration(FieldDeclaration decl) throws Exception {
// TODO Move below
TableEntry entry = this.getCurrentScope().getParentTypeScope().getFieldDecl((FieldDeclaration) decl);
		String label = staticLabel(entry.getName());
this.texts.add("global " + label + "_INIT");
this.texts.add(label + "_INIT:");
this.data.add("global " + label);
this.data.add(label + ": dd 0x0");
// Place the node into a list...
statics.add(label);
Expression initialization = decl.getInitial();
if (initialization != null) {
this.dereferenceVariable = false;
decl.getName().accept(this);
this.dereferenceVariable = true;
this.texts.add("push eax\t\t\t; Push LHS to stack");
initialization.accept(this);
this.texts.add("pop ebx");
this.texts.add("mov [ebx], eax");
}
this.texts.add("ret");
}
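	// Writes output/staticInit.s: Start_StaticInit externs every recorded static
	// initializer label (staticInit holds label/instruction pairs) and invokes
	// them in order, ending with ret.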
public void writeStaticInit() throws Exception {
this.texts = new ArrayList<String>();
this.externs = new HashSet<String>();
asmFile = new File("./output/staticInit.s");
this.asmFile.createNewFile();
this.texts.add("global Start_StaticInit\n");
this.texts.add("Start_StaticInit:\n");
for (int i = 0; i < staticInit.size(); i = i + 2) {
this.externs.add(staticInit.get(i));
this.texts.add(staticInit.get(i + 1));
}
BufferedWriter asmWriter = new BufferedWriter(new FileWriter(this.asmFile));
for (String line : this.externs) {
asmWriter.write("extern " + line);
}
asmWriter.newLine();
for (String line : this.texts) {
if (!line.startsWith("global") && !line.startsWith("section")) {
line = "\t" + line;
if (!line.endsWith(":")) {
line = "\t" + line;
}
}
asmWriter.write(line);
}
asmWriter.newLine();
asmWriter.write("ret\n");
asmWriter.close();
}
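	// Emits output/subtypeTable.s: an N x N dword matrix (N = total classes)
	// where row i holds 0xffffffff in column j iff class i is a subtype of class
	// j; the INSTANCEOF code above indexes into this table.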
public void generateSubtypeTable() throws Exception {
LinkedHashMap<TypeDeclaration, Stack<TypeScope>> classHierachyChain = HierarchyChecker.getLinkedClassHierachyChain();
// System.out.println(HierarchyChecker.getLinkedClassHierachyChain());
asmFile = new File("./output/subtypeTable.s");
this.asmFile.createNewFile();
BufferedWriter asmWriter = new BufferedWriter(new FileWriter(this.asmFile));
this.texts = new ArrayList<String>();
this.texts.add("global SubtypeTable\n");
this.texts.add("SubtypeTable:\n");
int classNum = HierarchyChecker.getTotalClassNum();
boolean[] subtypeArr = new boolean[classNum * classNum];
int i = 0;
for (Entry<TypeDeclaration, Stack<TypeScope>> entry : classHierachyChain.entrySet()) {
// System.out.println(entry+"entry:");
// entry.
Stack<TypeScope> classScopes = entry.getValue();
while (!classScopes.empty()) {
TypeScope classScope = classScopes.pop();
if (classScope.getReferenceNode() instanceof TypeDeclaration) {
int index = ((TypeDeclaration) classScope.getReferenceNode()).getHierarchyTableIndex();
subtypeArr[i * classNum + index] = true;
}
}
i++;
}
for (i = 0; i < subtypeArr.length; i++) {
if (subtypeArr[i]) {
this.texts.add("dd 0xffffffff");
} else {
this.texts.add("dd 0x0");
}
}
for (String line : this.texts) {
asmWriter.write(line);
asmWriter.newLine();
}
asmWriter.close();
}
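	// Copies the prebuilt resources/null.s stub into the output directory next
	// to the generated assembly files.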
public void copyNullAsm() throws Exception {
File nullAsm = new File("resources/null.s");
File outputAsm = new File("output/null.s");
Files.copy(nullAsm, outputAsm);
}
}
| Do not reference current object on field access
| src/ca/uwaterloo/joos/codegen/CodeGenerator.java | Do not reference current object on field access | <ide><path>rc/ca/uwaterloo/joos/codegen/CodeGenerator.java
<ide> import java.util.Stack;
<ide> import java.util.logging.Level;
<ide> import java.util.logging.Logger;
<del>
<del>import com.google.common.io.Files;
<ide>
<ide> import ca.uwaterloo.joos.Main;
<ide> import ca.uwaterloo.joos.ast.ASTNode;
<ide> import ca.uwaterloo.joos.ast.expr.name.SimpleName;
<ide> import ca.uwaterloo.joos.ast.expr.primary.ArrayAccess;
<ide> import ca.uwaterloo.joos.ast.expr.primary.ArrayCreate;
<add>import ca.uwaterloo.joos.ast.expr.primary.FieldAccess;
<ide> import ca.uwaterloo.joos.ast.expr.primary.LiteralPrimary;
<ide> import ca.uwaterloo.joos.ast.expr.primary.LiteralPrimary.LiteralType;
<ide> import ca.uwaterloo.joos.ast.expr.primary.Primary;
<ide> import ca.uwaterloo.joos.symboltable.TableEntry;
<ide> import ca.uwaterloo.joos.symboltable.TypeScope;
<ide>
<add>import com.google.common.io.Files;
<add>
<ide> public class CodeGenerator extends SemanticsVisitor {
<ide> public static final Logger logger = Main.getLogger(CodeGenerator.class);
<ide>
<ide> private Integer loopCount = 0;
<ide> private Integer conditionCount = 0;
<ide> private Boolean dereferenceVariable = true;
<add> private Boolean referenceCurrentObject = true;
<ide>
<ide> private Set<Class<?>> complexNodes = null;
<ide>
<ide> this.comparisonCount = 0;
<ide> this.loopCount = 0;
<ide> this.dereferenceVariable = true;
<add> this.referenceCurrentObject = true;
<add>
<ide> // Place the runtime.s externs
<ide> this.externs.add("__malloc");
<ide> this.externs.add("__debexit");
<ide> this.generateStaticFieldDeclaration((FieldDeclaration) node);
<ide> }
<ide> return false;
<add> } else if (node instanceof FieldAccess) {
<add> this.generateFieldAccess((FieldAccess) node);
<add> return false;
<ide> } else if (node instanceof MethodDeclaration) {
<ide> Block body = ((MethodDeclaration) node).getBody();
<ide> if (body != null) {
<ide>
<ide> private void generateVariableAccess(Name name) throws Exception {
<ide> int i = 0;
<add>
<add> if (name instanceof Name && this.referenceCurrentObject) {
<add> this.texts.add("mov eax, [ebp + 8]\t; Current object");
<add> }
<ide>
<ide> if (name instanceof SimpleName) {
<ide> TableEntry entry = ((SimpleName) name).getOriginalDeclaration();
<ide> throw new Exception("Unknown field " + field);
<ide> }
<ide> } else if (this.dereferenceVariable) {
<del> this.texts.add("mov eax, [ebp + 8]\t; Current object");
<ide> this.generateVariableDereference(entry);
<ide> } else {
<del> this.texts.add("mov eax, [ebp + 8]\t; Current object");
<ide> this.generateVariableAddress(entry);
<ide> }
<ide> } else if (name instanceof QualifiedName) {
<del> this.texts.add("mov eax, [ebp + 8]\t; Current object");
<del>
<ide> TableEntry entry = ((QualifiedName) name).getOriginalDeclaration();
<ide> if (entry.getNode() instanceof VariableDeclaration) {
<ide> this.generateVariableDereference(entry);
<ide> }
<ide> this.texts.add("__IF_END_" + conditionCount + ":");
<ide> }
<add>
<add> private void generateFieldAccess(FieldAccess fieldAccess) throws Exception {
<add> fieldAccess.getPrimary().accept(this);
<add> this.referenceCurrentObject = false;
<add> fieldAccess.getName().accept(this);
<add> this.referenceCurrentObject = true;
<add> }
<ide>
<ide> private void generateStaticFieldDeclaration(FieldDeclaration decl) throws Exception {
<ide> // TODO Move below |
|
Java | bsd-3-clause | 57af20d0469f87e8a32bef7df95adfa96567a60c | 0 | NCIP/c3pr,NCIP/c3pr,NCIP/c3pr | package edu.duke.cabig.c3pr.domain;
import org.easymock.classextension.EasyMock;
import org.springframework.context.MessageSource;
import edu.duke.cabig.c3pr.AbstractTestCase;
import edu.duke.cabig.c3pr.exception.C3PRCodedException;
import edu.duke.cabig.c3pr.exception.C3PRExceptionHelper;
import edu.duke.cabig.c3pr.utils.StudyCreationHelper;
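// Unit tests for Study: data entry status evaluation (incomplete vs. complete
// cases) and the derived site study status, with C3PR error lookups mocked via
// EasyMock.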
public class StudyTestCase extends AbstractTestCase {
@Override
protected void setUp() throws Exception {
super.setUp();
basicStudy = studyCreationHelper.createBasicStudy();
basicStudy.setC3prErrorMessages(c3prErrorMessages);
basicStudy.setC3PRExceptionHelper(c3prExceptionHelper);
}
C3PRExceptionHelper c3prExceptionHelper = registerMockFor(C3PRExceptionHelper.class);
MessageSource c3prErrorMessages = registerMockFor(MessageSource.class);
	private StudyCreationHelper studyCreationHelper = new StudyCreationHelper();
private Study basicStudy;
public void setStudyCreationHelper(StudyCreationHelper studyCreationHelper) {
this.studyCreationHelper = studyCreationHelper;
}
public void testDataEntryStatusIncompleteCase1() throws Exception {
EasyMock.expect(c3prErrorMessages.getMessage("C3PR.EXCEPTION.STUDY.DATAENTRY.MISSING.STUDY_SITE.CODE", null, null)).andReturn("301");
EasyMock.expect(c3prExceptionHelper.getException(301)).andReturn(new C3PRCodedException(301, "exception message"));
replayMocks();
try {
basicStudy.evaluateDataEntryStatus();
fail("Should have thrown C3PRCodedException");
} catch (Exception e) {
assertEquals("Exception should have been of type C3PRCodedException",true, e instanceof C3PRCodedException);
}
verifyMocks();
}
public void testDataEntryStatusIncompleteCase2() throws Exception {
basicStudy.addStudySite(new StudySite());
EasyMock.expect(c3prErrorMessages.getMessage("C3PR.EXCEPTION.STUDY.DATAENTRY.MISSING.ENROLLING_EPOCH.CODE", null, null)).andReturn("300");
EasyMock.expect(c3prExceptionHelper.getException(300)).andReturn(new C3PRCodedException(300, "exception message"));
replayMocks();
try {
basicStudy.evaluateDataEntryStatus();
fail("Should have thrown C3PRCodedException");
} catch (Exception e) {
assertEquals("Exception should have been of type C3PRCodedException",true, e instanceof C3PRCodedException);
}
verifyMocks();
}
public void testDataEntryStatusIncompleteCase3() throws Exception {
basicStudy.addStudySite(new StudySite());
Epoch nonTreatmentEpoch = new Epoch();
basicStudy.addEpoch(nonTreatmentEpoch);
EasyMock.expect(c3prErrorMessages.getMessage("C3PR.EXCEPTION.STUDY.DATAENTRY.MISSING.ENROLLING_EPOCH.CODE", null, null)).andReturn("302");
EasyMock.expect(c3prExceptionHelper.getException(302)).andReturn(new C3PRCodedException(302, "exception message"));
replayMocks();
try {
basicStudy.evaluateDataEntryStatus();
fail("Should have thrown C3PRCodedException");
} catch (Exception e) {
assertEquals("Exception should have been of type C3PRCodedException",true, e instanceof C3PRCodedException);
}
verifyMocks();
}
public void testDataEntryStatusIncompleteCase4() throws Exception {
studyCreationHelper.addStudySiteAndRandomizedTreatmentEpochToBasicStudy(basicStudy);
basicStudy.getEpochs().get(0).setExceptionHelper(c3prExceptionHelper);
basicStudy.getEpochs().get(0).setC3prErrorMessages(c3prErrorMessages);
basicStudy.setStratificationIndicator(false);
EasyMock.expect(c3prErrorMessages.getMessage("C3PR.EXCEPTION.STUDY.DATAENTRY.MISSING.ATLEAST_2_ARMS_FOR_RANDOMIZED_EPOCH.CODE", null, null)).andReturn("306");
EasyMock.expect(c3prExceptionHelper.getException(EasyMock.eq(306),EasyMock.aryEq(new String[]{"Treatment Epoch1"}))).andReturn(new C3PRCodedException(306, "exception message"));
replayMocks();
try {
basicStudy.evaluateDataEntryStatus();
fail("Should have thrown C3PRCodedException");
} catch (Exception e) {
assertEquals("Exception should have been of type C3PRCodedException",true, e instanceof C3PRCodedException);
}
verifyMocks();
}
public void testDataEntryStatusIncompleteCase5() throws Exception {
studyCreationHelper.addStudySiteAndRandomizedTreatmentEpochWith2ArmsToBasicStudy(basicStudy);
basicStudy.getEpochs().get(0).setExceptionHelper(c3prExceptionHelper);
basicStudy.getEpochs().get(0).setC3prErrorMessages(c3prErrorMessages);
EasyMock.expect(c3prErrorMessages.getMessage("C3PR.EXCEPTION.STUDY.DATAENTRY.MISSING.STRATIFICATION_CRITERIA_OR_STRATUM_GROUPS_FOR_RANDOMIZED_EPOCH.CODE", null, null)).andReturn("304");
EasyMock.expect(c3prExceptionHelper.getException(EasyMock.eq(304),EasyMock.aryEq(new String[]{"Treatment Epoch1"}))).andReturn(new C3PRCodedException(304, "exception message"));
basicStudy.setStratificationIndicator(true);
basicStudy.getEpochs().get(0).setStratificationIndicator(true);
replayMocks();
try {
basicStudy.evaluateDataEntryStatus();
fail("Should have thrown C3PRCodedException");
} catch (Exception e) {
assertEquals("Exception should have been of type C3PRCodedException",true, e instanceof C3PRCodedException);
}
verifyMocks();
}
public void testDataEntryStatusIncompleteCase6() throws Exception {
studyCreationHelper.addStudySiteRandomizedEnrollingTreatmentEpochWith2ArmsAndStratumGroupsToBasicStudy(basicStudy);
basicStudy.getEpochs().get(0).setExceptionHelper(c3prExceptionHelper);
basicStudy.getEpochs().get(0).setC3prErrorMessages(c3prErrorMessages);
basicStudy.setStratificationIndicator(true);
basicStudy.getEpochs().get(0).setStratificationIndicator(true);
EasyMock.expect(c3prErrorMessages.getMessage("C3PR.EXCEPTION.STUDY.DATAENTRY.MISSING.RANDOMIZATION_FOR_RANDOMIZED_EPOCH.CODE", null, null)).andReturn("307");
EasyMock.expect(c3prExceptionHelper.getException(EasyMock.eq(307),EasyMock.aryEq(new String[]{"Treatment Epoch1"}))).andReturn(new C3PRCodedException(307, "exception message"));
replayMocks();
try {
basicStudy.evaluateDataEntryStatus();
fail("Should have thrown C3PRCodedException");
} catch (Exception e) {
assertEquals("Exception should have been of type C3PRCodedException",true, e instanceof C3PRCodedException);
}
verifyMocks();
}
public void testDataEntryStatusCompleteCase1() throws Exception {
studyCreationHelper.addStudySiteAndEnrollingEpochToBasicStudy(basicStudy);
basicStudy.setStratificationIndicator(false);
replayMocks();
assertEquals("Data Entry Status should evaluate to Complete",StudyDataEntryStatus.COMPLETE,basicStudy.evaluateDataEntryStatus());
verifyMocks();
}
public void testDataEntryStatusCompleteCase2() throws Exception {
studyCreationHelper.addStudySiteRandomizedTreatmentEpochWith2ArmsStratumGroupsAndRandomizationToBasicStudy(basicStudy);
basicStudy.setStratificationIndicator(true);
basicStudy.getEpochs().get(0).setStratificationIndicator(true);
replayMocks();
assertEquals("Wrong Data Entry Status",StudyDataEntryStatus.COMPLETE,basicStudy.evaluateDataEntryStatus());
verifyMocks();
}
public void testSiteStudyStatusPendingCase1() throws Exception {
studyCreationHelper.addStudySiteAndEnrollingEpochToBasicStudy(basicStudy);
basicStudy.setCoordinatingCenterStudyStatus(CoordinatingCenterStudyStatus.PENDING);
assertEquals("Wrong Site study status",SiteStudyStatus.PENDING,basicStudy.getStudySites().get(0).evaluateSiteStudyStatus());
}
public void testSiteStudyStatusActiveCase1() throws Exception {
studyCreationHelper.addStudySiteRandomizedTreatmentEpochWith2ArmsStratumGroupsAndRandomizationToBasicStudy(basicStudy);
basicStudy.setStratificationIndicator(false);
basicStudy.setCoordinatingCenterStudyStatus(basicStudy.evaluateCoordinatingCenterStudyStatus());
assertEquals("Study status should evaluate to Active",CoordinatingCenterStudyStatus.OPEN,basicStudy.getCoordinatingCenterStudyStatus());
assertEquals("Site Study status should evaluate to OPEN",SiteStudyStatus.ACTIVE,basicStudy.getStudySites().get(0).evaluateSiteStudyStatus());
}
}
| codebase/projects/core/test/src/java/edu/duke/cabig/c3pr/domain/StudyTestCase.java | package edu.duke.cabig.c3pr.domain;
import org.easymock.classextension.EasyMock;
import org.springframework.context.MessageSource;
import edu.duke.cabig.c3pr.AbstractTestCase;
import edu.duke.cabig.c3pr.exception.C3PRCodedException;
import edu.duke.cabig.c3pr.exception.C3PRExceptionHelper;
import edu.duke.cabig.c3pr.utils.StudyCreationHelper;
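// Unit tests for Study: data entry status evaluation (incomplete vs. complete
// cases) and the derived site study status, with C3PR error lookups mocked via
// EasyMock.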
public class StudyTestCase extends AbstractTestCase {
@Override
protected void setUp() throws Exception {
super.setUp();
basicStudy = studyCreationHelper.createBasicStudy();
basicStudy.setC3prErrorMessages(c3prErrorMessages);
basicStudy.setC3PRExceptionHelper(c3prExceptionHelper);
}
C3PRExceptionHelper c3prExceptionHelper = registerMockFor(C3PRExceptionHelper.class);
MessageSource c3prErrorMessages = registerMockFor(MessageSource.class);
	private StudyCreationHelper studyCreationHelper = new StudyCreationHelper();
private Study basicStudy;
public void setStudyCreationHelper(StudyCreationHelper studyCreationHelper) {
this.studyCreationHelper = studyCreationHelper;
}
public void testDataEntryStatusIncompleteCase1() throws Exception {
EasyMock.expect(c3prErrorMessages.getMessage("C3PR.EXCEPTION.STUDY.DATAENTRY.MISSING.STUDY_SITE.CODE", null, null)).andReturn("301");
EasyMock.expect(c3prExceptionHelper.getException(301)).andReturn(new C3PRCodedException(301, "exception message"));
replayMocks();
try {
basicStudy.evaluateDataEntryStatus();
fail("Should have thrown C3PRCodedException");
} catch (Exception e) {
assertEquals("Exception should have been of type C3PRCodedException",true, e instanceof C3PRCodedException);
}
verifyMocks();
}
public void testDataEntryStatusIncompleteCase2() throws Exception {
basicStudy.addStudySite(new StudySite());
EasyMock.expect(c3prErrorMessages.getMessage("C3PR.EXCEPTION.STUDY.DATAENTRY.MISSING.ENROLLING_EPOCH.CODE", null, null)).andReturn("300");
EasyMock.expect(c3prExceptionHelper.getException(300)).andReturn(new C3PRCodedException(300, "exception message"));
replayMocks();
try {
basicStudy.evaluateDataEntryStatus();
fail("Should have thrown C3PRCodedException");
} catch (Exception e) {
assertEquals("Exception should have been of type C3PRCodedException",true, e instanceof C3PRCodedException);
}
verifyMocks();
}
public void testDataEntryStatusIncompleteCase3() throws Exception {
basicStudy.addStudySite(new StudySite());
Epoch nonTreatmentEpoch = new Epoch();
basicStudy.addEpoch(nonTreatmentEpoch);
EasyMock.expect(c3prErrorMessages.getMessage("C3PR.EXCEPTION.STUDY.DATAENTRY.MISSING.ENROLLING_EPOCH.CODE", null, null)).andReturn("302");
EasyMock.expect(c3prExceptionHelper.getException(302)).andReturn(new C3PRCodedException(302, "exception message"));
replayMocks();
try {
basicStudy.evaluateDataEntryStatus();
fail("Should have thrown C3PRCodedException");
} catch (Exception e) {
assertEquals("Exception should have been of type C3PRCodedException",true, e instanceof C3PRCodedException);
}
verifyMocks();
}
public void testDataEntryStatusIncompleteCase4() throws Exception {
studyCreationHelper.addStudySiteAndRandomizedTreatmentEpochToBasicStudy(basicStudy);
basicStudy.getEpochs().get(0).setExceptionHelper(c3prExceptionHelper);
basicStudy.getEpochs().get(0).setC3prErrorMessages(c3prErrorMessages);
basicStudy.setStratificationIndicator(false);
EasyMock.expect(c3prErrorMessages.getMessage("C3PR.EXCEPTION.STUDY.DATAENTRY.MISSING.ATLEAST_2_ARMS_FOR_RANDOMIZED_EPOCH.CODE", null, null)).andReturn("306");
EasyMock.expect(c3prExceptionHelper.getException(EasyMock.eq(306),EasyMock.aryEq(new String[]{"Treatment Epoch1"}))).andReturn(new C3PRCodedException(306, "exception message"));
replayMocks();
try {
basicStudy.evaluateDataEntryStatus();
fail("Should have thrown C3PRCodedException");
} catch (Exception e) {
assertEquals("Exception should have been of type C3PRCodedException",true, e instanceof C3PRCodedException);
}
verifyMocks();
}
public void testDataEntryStatusIncompleteCase5() throws Exception {
studyCreationHelper.addStudySiteAndRandomizedTreatmentEpochWith2ArmsToBasicStudy(basicStudy);
basicStudy.getEpochs().get(0).setExceptionHelper(c3prExceptionHelper);
basicStudy.getEpochs().get(0).setC3prErrorMessages(c3prErrorMessages);
EasyMock.expect(c3prErrorMessages.getMessage("C3PR.EXCEPTION.STUDY.DATAENTRY.MISSING.STRATIFICATION_CRITERIA_OR_STRATUM_GROUPS_FOR_RANDOMIZED_EPOCH.CODE", null, null)).andReturn("304");
EasyMock.expect(c3prExceptionHelper.getException(EasyMock.eq(304),EasyMock.aryEq(new String[]{"Treatment Epoch1"}))).andReturn(new C3PRCodedException(304, "exception message"));
basicStudy.setStratificationIndicator(true);
basicStudy.getEpochs().get(0).setStratificationIndicator(true);
replayMocks();
try {
basicStudy.evaluateDataEntryStatus();
fail("Should have thrown C3PRCodedException");
} catch (Exception e) {
assertEquals("Exception should have been of type C3PRCodedException",true, e instanceof C3PRCodedException);
}
verifyMocks();
}
public void testDataEntryStatusIncompleteCase6() throws Exception {
studyCreationHelper.addStudySiteRandomizedEnrollingTreatmentEpochWith2ArmsAndStratumGroupsToBasicStudy(basicStudy);
basicStudy.getEpochs().get(0).setExceptionHelper(c3prExceptionHelper);
basicStudy.getEpochs().get(0).setC3prErrorMessages(c3prErrorMessages);
basicStudy.setStratificationIndicator(true);
basicStudy.getEpochs().get(0).setStratificationIndicator(true);
EasyMock.expect(c3prErrorMessages.getMessage("C3PR.EXCEPTION.STUDY.DATAENTRY.MISSING.RANDOMIZATION_FOR_RANDOMIZED_EPOCH.CODE", null, null)).andReturn("307");
EasyMock.expect(c3prExceptionHelper.getException(EasyMock.eq(307),EasyMock.aryEq(new String[]{"Treatment Epoch1"}))).andReturn(new C3PRCodedException(307, "exception message"));
replayMocks();
try {
basicStudy.evaluateDataEntryStatus();
fail("Should have thrown C3PRCodedException");
} catch (Exception e) {
assertEquals("Exception should have been of type C3PRCodedException",true, e instanceof C3PRCodedException);
}
verifyMocks();
}
public void testDataEntryStatusCompleteCase1() throws Exception {
studyCreationHelper.addStudySiteAndEnrollingEpochToBasicStudy(basicStudy);
basicStudy.setStratificationIndicator(false);
replayMocks();
assertEquals("Data Entry Status should evaluate to Complete",StudyDataEntryStatus.COMPLETE,basicStudy.evaluateDataEntryStatus());
verifyMocks();
}
public void testDataEntryStatusCompleteCase2() throws Exception {
studyCreationHelper.addStudySiteRandomizedTreatmentEpochWith2ArmsStratumGroupsAndRandomizationToBasicStudy(basicStudy);
basicStudy.setStratificationIndicator(true);
basicStudy.getEpochs().get(0).setStratificationIndicator(true);
replayMocks();
assertEquals("Wrong Data Entry Status",StudyDataEntryStatus.COMPLETE,basicStudy.evaluateDataEntryStatus());
verifyMocks();
}
public void testSiteStudyStatusPendingCase1() throws Exception {
studyCreationHelper.addStudySiteAndEnrollingEpochToBasicStudy(basicStudy);
basicStudy.setCoordinatingCenterStudyStatus(CoordinatingCenterStudyStatus.PENDING);
assertEquals("Wrong Site study status",SiteStudyStatus.PENDING,basicStudy.getStudySites().get(0).evaluateSiteStudyStatus());
}
public void testSiteStudyStatusActiveCase1() throws Exception {
studyCreationHelper.addStudySiteRandomizedTreatmentEpochWith2ArmsStratumGroupsAndRandomizationToBasicStudy(basicStudy);
basicStudy.setStratificationIndicator(false);
basicStudy.setCoordinatingCenterStudyStatus(basicStudy.evaluateCoordinatingCenterStudyStatus());
assertEquals("Study status should evaluate to Active",CoordinatingCenterStudyStatus.ACTIVE,basicStudy.getCoordinatingCenterStudyStatus());
assertEquals("Site Study status should evaluate to Active",SiteStudyStatus.ACTIVE,basicStudy.getStudySites().get(0).evaluateSiteStudyStatus());
}
}
| CPR-304: changed coordinating study status from Active to Open
| codebase/projects/core/test/src/java/edu/duke/cabig/c3pr/domain/StudyTestCase.java | CPR-304: changed coordinating study status from Active to Open | <ide><path>odebase/projects/core/test/src/java/edu/duke/cabig/c3pr/domain/StudyTestCase.java
<ide> studyCreationHelper.addStudySiteRandomizedTreatmentEpochWith2ArmsStratumGroupsAndRandomizationToBasicStudy(basicStudy);
<ide> basicStudy.setStratificationIndicator(false);
<ide> basicStudy.setCoordinatingCenterStudyStatus(basicStudy.evaluateCoordinatingCenterStudyStatus());
<del> assertEquals("Study status should evaluate to Active",CoordinatingCenterStudyStatus.ACTIVE,basicStudy.getCoordinatingCenterStudyStatus());
<del> assertEquals("Site Study status should evaluate to Active",SiteStudyStatus.ACTIVE,basicStudy.getStudySites().get(0).evaluateSiteStudyStatus());
<add> assertEquals("Study status should evaluate to Active",CoordinatingCenterStudyStatus.OPEN,basicStudy.getCoordinatingCenterStudyStatus());
<add> assertEquals("Site Study status should evaluate to OPEN",SiteStudyStatus.ACTIVE,basicStudy.getStudySites().get(0).evaluateSiteStudyStatus());
<ide> }
<ide>
<ide> |
|
Java | mit | 85c0591bd23b00112fe1697cfc628486cf87d443 | 0 | HEIG-GAPS/slasher | package ch.gaps.slasher.highliter;
import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.List;
import java.util.regex.Pattern;
/**
* This interface represents the highlighter for a CodeArea (RichTextFX class)
*/
public interface Highlighter {
// regex used for highlighting
// common for any SQL syntax
String STRING_PATTERN = "'([^\"\\\\]|\\\\.)*'";
String SEMICOLON_PATTERN = "\\;";
String COMMENT_PATTERN = "\\-{2}[^\n]*";
// pattern group names
String STRING_GROUP_NAME = "STRING";
String SEMICOLON_GROUP_NAME = "SEMICOLON";
String COMMENT_GROUP_NAME = "COMMENT";
String KEYWORD_GROUP_NAME = "KEYWORD";
/**
	 * Returns the list of SQL keywords
	 * @return the list of SQL keywords
	 * @throws URISyntaxException if a problem occurred while generating a URI for the file containing the keywords
	 * @throws IOException if a problem occurred while reading the content of the file containing the keywords
*/
default List<String> getKeywords() throws URISyntaxException, IOException {
return Files.readAllLines(Paths.get(Highlighter.class.getResource("sql2003_keywords.txt").toURI()));
}
/**
	 * Returns the matcher group names.
	 * Important note: the CSS style class is the matcher group name in lower case.
* @return the matcher group names
*/
List<String> getMatcherGroupNames();
/**
	 * String Pattern for the given SQL syntax
* @return the complete {@link Pattern} containing all the groups we want to highlight
*/
Pattern getPattern();
}
| slasher.highlighter/src/main/java/ch/gaps/slasher/highliter/Highlighter.java | package ch.gaps.slasher.highliter;
import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.List;
import java.util.regex.Pattern;
/**
* This interface represents the highlighter for a CodeArea (RichTextFX class)
*/
public interface Highlighter {
// regex used for highlighting
// common for any SQL syntax
String STRING_PATTERN = "'([^\"\\\\]|\\\\.)*'";
String SEMICOLON_PATTERN = "\\;";
String COMMENT_PATTERN = "\\-{2}[^\n]*";
// pattern group names
String STRING_GROUP_NAME = "STRING";
String SEMICOLON_GROUP_NAME = "SEMICOLON";
String COMMENT_GROUP_NAME = "COMMENT";
String KEYWORD_GROUP_NAME = "KEYWORD";
/**
	 * Returns the list of SQL keywords
	 * @return the list of SQL keywords
	 * @throws URISyntaxException if a problem occurred while generating a URI for the file containing the keywords
	 * @throws IOException if a problem occurred while reading the content of the file containing the keywords
*/
default List<String> getKeywords() throws URISyntaxException, IOException {
return Files.readAllLines(Paths.get(Highlighter.class.getResource("sql2003_keywords.txt").toURI()));
}
/**
	 * Returns the matcher group names.
	 * Important note: the CSS style class is the matcher group name in lower case.
* @return the matcher group names
*/
List<String> getMatcherGroupNames();
/**
	 * String Pattern for the given SQL syntax
* @return
*/
Pattern getPattern();
}
| fork synchronized
| slasher.highlighter/src/main/java/ch/gaps/slasher/highliter/Highlighter.java | fork synchronized | <ide><path>lasher.highlighter/src/main/java/ch/gaps/slasher/highliter/Highlighter.java
<ide>
<ide> /**
<ide> * String Pattern for the given SQL syntax
<del> * @return
<add> * @return the complete {@link Pattern} containing all the groups we want to highlight
<ide> */
<ide> Pattern getPattern();
<ide> } |
|
JavaScript | mit | 4d3672ae5d52b781ea1a740142a74ba416833113 | 0 | ironhack/hackmd,ironhack/hackmd,jccrofty30/hackmd,Rwing/hackmd,PeterDaveHello/hackmd,hackmdio/hackmd,jackycute/HackMD,PeterDaveHello/hackmd,PeterDaveHello/hackmd,hackmdio/hackmd,Rwing/hackmd,jccrofty30/hackmd,jackycute/HackMD,ironhack/hackmd,Yukaii/hackmd,hackmdio/hackmd,Yukaii/hackmd,jackycute/HackMD,Yukaii/hackmd,aswathyjacob8/newtest,jccrofty30/hackmd,Rwing/hackmd,aswathyjacob8/newtest,aswathyjacob8/newtest | //app
//external modules
var express = require('express');
var toobusy = require('toobusy-js');
var ejs = require('ejs');
var passport = require('passport');
var methodOverride = require('method-override');
var cookieParser = require('cookie-parser');
var bodyParser = require('body-parser');
var compression = require('compression');
var session = require('express-session');
var SequelizeStore = require('connect-session-sequelize')(session.Store);
var fs = require('fs');
var imgur = require('imgur');
var formidable = require('formidable');
var morgan = require('morgan');
var passportSocketIo = require("passport.socketio");
var helmet = require('helmet');
var i18n = require('i18n');
//core
var config = require("./lib/config.js");
var logger = require("./lib/logger.js");
var auth = require("./lib/auth.js");
var history = require("./lib/history.js");
var response = require("./lib/response.js");
var models = require("./lib/models");
//server setup
if (config.usessl) {
var ca = (function () {
var i, len, results;
results = [];
for (i = 0, len = config.sslcapath.length; i < len; i++) {
results.push(fs.readFileSync(config.sslcapath[i], 'utf8'));
}
return results;
})();
var options = {
key: fs.readFileSync(config.sslkeypath, 'utf8'),
cert: fs.readFileSync(config.sslcertpath, 'utf8'),
ca: ca,
dhparam: fs.readFileSync(config.dhparampath, 'utf8'),
requestCert: false,
rejectUnauthorized: false
};
var app = express();
var server = require('https').createServer(options, app);
} else {
var app = express();
var server = require('http').createServer(app);
}
//logger
app.use(morgan('combined', {
"stream": logger.stream
}));
//socket io
var io = require('socket.io')(server);
//others
var realtime = require("./lib/realtime.js");
//assign socket io to realtime
realtime.io = io;
//methodOverride
app.use(methodOverride('_method'));
// create application/json parser
var jsonParser = bodyParser.json({
limit: 1024 * 1024 * 10 // 10 mb
});
// create application/x-www-form-urlencoded parser
var urlencodedParser = bodyParser.urlencoded({
extended: false,
limit: 1024 * 1024 * 10 // 10 mb
});
//session store
var sessionStore = new SequelizeStore({
db: models.sequelize
});
//compression
app.use(compression());
// use hsts to tell https users stick to this
app.use(helmet.hsts({
maxAge: 31536000 * 1000, // 365 days
includeSubdomains: true,
preload: true
}));
i18n.configure({
locales: ['en', 'zh', 'fr', 'de', 'ja', 'es', 'el', 'pt', 'it', 'tr', 'ru', 'nl', 'hr', 'pl', 'uk', 'hi', 'sv'],
cookie: 'locale',
directory: __dirname + '/locales'
});
app.use(cookieParser());
app.use(i18n.init);
// routes without sessions
// static files
app.use('/', express.static(__dirname + '/public', { maxAge: config.staticcachetime }));
app.use('/vendor/', express.static(__dirname + '/bower_components', { maxAge: config.staticcachetime }));
//session
app.use(session({
name: config.sessionname,
secret: config.sessionsecret,
resave: false, //don't save session if unmodified
saveUninitialized: true, //always create session to ensure the origin
rolling: true, // reset maxAge on every response
cookie: {
maxAge: config.sessionlife
},
store: sessionStore
}));
// session resumption
var tlsSessionStore = {};
server.on('newSession', function (id, data, cb) {
tlsSessionStore[id.toString('hex')] = data;
cb();
});
server.on('resumeSession', function (id, cb) {
cb(null, tlsSessionStore[id.toString('hex')] || null);
});
//middleware which blocks requests when we're too busy
app.use(function (req, res, next) {
if (toobusy()) {
response.errorServiceUnavailable(res);
} else {
next();
}
});
//passport
app.use(passport.initialize());
app.use(passport.session());
//serialize and deserialize
passport.serializeUser(function (user, done) {
logger.info('serializeUser: ' + user.id);
return done(null, user.id);
});
passport.deserializeUser(function (id, done) {
models.User.findOne({
where: {
id: id
}
}).then(function (user) {
logger.info('deserializeUser: ' + user.id);
return done(null, user);
}).catch(function (err) {
logger.error(err);
return done(err, null);
});
});
// redirect url with trailing slashes
app.use(function(req, res, next) {
if ("GET" == req.method && req.path.substr(-1) == '/' && req.path.length > 1) {
var query = req.url.slice(req.path.length);
res.redirect(301, config.serverurl + req.path.slice(0, -1) + query);
} else {
next();
}
});
// routes need sessions
//template files
app.set('views', __dirname + '/public/views');
//set render engine
app.engine('ejs', ejs.renderFile);
//set view engine
app.set('view engine', 'ejs');
//get index
app.get("/", response.showIndex);
//get 403 forbidden
app.get("/403", function (req, res) {
response.errorForbidden(res);
});
//get 404 not found
app.get("/404", function (req, res) {
response.errorNotFound(res);
});
//get 500 internal error
app.get("/500", function (req, res) {
response.errorInternalError(res);
});
//get status
app.get("/status", function (req, res, next) {
realtime.getStatus(function (data) {
res.set({
'Cache-Control': 'private', // only cache by client
'X-Robots-Tag': 'noindex, nofollow' // prevent crawling
});
res.send(data);
});
});
//get temp
app.get("/temp", function (req, res) {
var host = req.get('host');
if (config.alloworigin.indexOf(host) == -1)
response.errorForbidden(res);
else {
var tempid = req.query.tempid;
if (!tempid)
response.errorForbidden(res);
else {
models.Temp.findOne({
where: {
id: tempid
}
}).then(function (temp) {
if (!temp)
response.errorNotFound(res);
else {
res.header("Access-Control-Allow-Origin", "*");
res.send({
temp: temp.data
});
temp.destroy().catch(function (err) {
if (err)
logger.error('remove temp failed: ' + err);
});
}
}).catch(function (err) {
logger.error(err);
return response.errorInternalError(res);
});
}
}
});
//post temp
app.post("/temp", urlencodedParser, function (req, res) {
var host = req.get('host');
if (config.alloworigin.indexOf(host) == -1)
response.errorForbidden(res);
else {
var data = req.body.data;
if (!data)
response.errorForbidden(res);
else {
if (config.debug)
logger.info('SERVER received temp from [' + host + ']: ' + req.body.data);
models.Temp.create({
data: data
}).then(function (temp) {
if (temp) {
res.header("Access-Control-Allow-Origin", "*");
res.send({
status: 'ok',
id: temp.id
});
} else
response.errorInternalError(res);
}).catch(function (err) {
logger.error(err);
return response.errorInternalError(res);
});
}
}
});
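// remember the page the user came from so the OAuth callback can redirect back
// (passport reads req.session.returnTo via successReturnToOrRedirect)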
function setReturnToFromReferer(req) {
var referer = req.get('referer');
if (!req.session) req.session = {};
req.session.returnTo = referer;
}
//facebook auth
if (config.facebook) {
app.get('/auth/facebook', function (req, res, next) {
setReturnToFromReferer(req);
passport.authenticate('facebook')(req, res, next);
});
//facebook auth callback
app.get('/auth/facebook/callback',
passport.authenticate('facebook', {
successReturnToOrRedirect: config.serverurl + '/',
failureRedirect: config.serverurl + '/'
}));
}
//twitter auth
if (config.twitter) {
app.get('/auth/twitter', function (req, res, next) {
setReturnToFromReferer(req);
passport.authenticate('twitter')(req, res, next);
});
//twitter auth callback
app.get('/auth/twitter/callback',
passport.authenticate('twitter', {
successReturnToOrRedirect: config.serverurl + '/',
failureRedirect: config.serverurl + '/'
}));
}
//github auth
if (config.github) {
app.get('/auth/github', function (req, res, next) {
setReturnToFromReferer(req);
passport.authenticate('github')(req, res, next);
});
//github auth callback
app.get('/auth/github/callback',
passport.authenticate('github', {
successReturnToOrRedirect: config.serverurl + '/',
failureRedirect: config.serverurl + '/'
}));
//github callback actions
app.get('/auth/github/callback/:noteId/:action', response.githubActions);
}
//gitlab auth
if (config.gitlab) {
app.get('/auth/gitlab', function (req, res, next) {
setReturnToFromReferer(req);
passport.authenticate('gitlab')(req, res, next);
});
//gitlab auth callback
app.get('/auth/gitlab/callback',
passport.authenticate('gitlab', {
successReturnToOrRedirect: config.serverurl + '/',
failureRedirect: config.serverurl + '/'
}));
//gitlab callback actions
app.get('/auth/gitlab/callback/:noteId/:action', response.gitlabActions);
}
//dropbox auth
if (config.dropbox) {
app.get('/auth/dropbox', function (req, res, next) {
setReturnToFromReferer(req);
passport.authenticate('dropbox-oauth2')(req, res, next);
});
//dropbox auth callback
app.get('/auth/dropbox/callback',
passport.authenticate('dropbox-oauth2', {
successReturnToOrRedirect: config.serverurl + '/',
failureRedirect: config.serverurl + '/'
}));
}
//google auth
if (config.google) {
app.get('/auth/google', function (req, res, next) {
setReturnToFromReferer(req);
passport.authenticate('google', { scope: ['profile'] })(req, res, next);
});
//google auth callback
app.get('/auth/google/callback',
passport.authenticate('google', {
successReturnToOrRedirect: config.serverurl + '/',
failureRedirect: config.serverurl + '/'
}));
}
//logout
app.get('/logout', function (req, res) {
if (config.debug && req.isAuthenticated())
logger.info('user logout: ' + req.user.id);
req.logout();
res.redirect(config.serverurl + '/');
});
//get history
app.get('/history', history.historyGet);
//post history
app.post('/history', urlencodedParser, history.historyPost);
//post history by note id
app.post('/history/:noteId', urlencodedParser, history.historyPost);
//delete history
app.delete('/history', history.historyDelete);
//delete history by note id
app.delete('/history/:noteId', history.historyDelete);
//get me info
app.get('/me', function (req, res) {
if (req.isAuthenticated()) {
models.User.findOne({
where: {
id: req.user.id
}
}).then(function (user) {
if (!user)
return response.errorNotFound(res);
var profile = models.User.parseProfile(user.profile);
res.send({
status: 'ok',
id: req.user.id,
name: profile.name,
photo: profile.photo
});
}).catch(function (err) {
logger.error('read me failed: ' + err);
return response.errorInternalError(res);
});
} else {
res.send({
status: 'forbidden'
});
}
});
//upload image (to imgur or the local filesystem, depending on config)
app.post('/uploadimage', function (req, res) {
var form = new formidable.IncomingForm();
if (config.imageUploadType === 'filesystem') {
form.uploadDir = "public/uploads";
form.keepExtensions = true;
}
form.parse(req, function (err, fields, files) {
if (err || !files.image || !files.image.path) {
response.errorForbidden(res);
} else {
if (config.debug)
logger.info('SERVER received uploadimage: ' + JSON.stringify(files.image));
try {
switch (config.imageUploadType) {
case 'filesystem':
var path = require('path');
res.send({
link: path.join(config.serverurl, files.image.path.match(/^public(.+$)/)[1])
});
break;
case 'imgur':
default:
imgur.setClientId(config.imgur.clientID);
imgur.uploadFile(files.image.path)
.then(function (json) {
if (config.debug)
logger.info('SERVER uploadimage success: ' + JSON.stringify(json));
res.send({
link: json.data.link.replace(/^http:\/\//i, 'https://')
});
})
.catch(function (err) {
logger.error(err);
return res.status(500).end('upload image error');
});
break;
}
} catch (err) {
logger.error(err);
return res.status(500).end('upload image error');
}
}
});
});
//get new note
app.get("/new", response.newNote);
//get publish note
app.get("/s/:shortid", response.showPublishNote);
//publish note actions
app.get("/s/:shortid/:action", response.publishNoteActions);
//get publish slide
app.get("/p/:shortid", response.showPublishSlide);
//publish slide actions
app.get("/p/:shortid/:action", response.publishSlideActions);
//get note by id
app.get("/:noteId", response.showNote);
//note actions
app.get("/:noteId/:action", response.noteActions);
//note actions with action id
app.get("/:noteId/:action/:actionId", response.noteActions);
// respond with not found if no route matches
app.get('*', function (req, res) {
response.errorNotFound(res);
});
//socket.io secure
io.use(realtime.secure);
//socket.io auth
io.use(passportSocketIo.authorize({
cookieParser: cookieParser,
key: config.sessionname,
secret: config.sessionsecret,
store: sessionStore,
success: realtime.onAuthorizeSuccess,
fail: realtime.onAuthorizeFail
}));
//socket.io heartbeat
io.set('heartbeat interval', config.heartbeatinterval);
io.set('heartbeat timeout', config.heartbeattimeout);
//socket.io connection
io.sockets.on('connection', realtime.connection);
//listen
function startListen() {
server.listen(config.port, function () {
var schema = config.usessl ? 'HTTPS' : 'HTTP';
logger.info('%s Server listening at port %d', schema, config.port);
config.maintenance = false;
});
}
// sync db then start listen
models.sequelize.sync().then(function () {
// check if realtime is ready
if (history.isReady() && realtime.isReady()) {
models.Revision.checkAllNotesRevision(function (err, notes) {
if (err) throw new Error(err);
if (!notes || notes.length <= 0) return startListen();
});
} else {
throw new Error('server still not ready after db synced');
}
});
// log uncaught exception
process.on('uncaughtException', function (err) {
    logger.error('An uncaught exception has occurred.');
logger.error(err);
logger.error('Process will exit now.');
process.exit(1);
});
// gracefully exit
process.on('SIGINT', function () {
config.maintenance = true;
// disconnect all socket.io clients
Object.keys(io.sockets.sockets).forEach(function (key) {
var socket = io.sockets.sockets[key];
// notify client server going into maintenance status
socket.emit('maintenance');
setTimeout(function () {
socket.disconnect(true);
}, 0);
});
var checkCleanTimer = setInterval(function () {
if (history.isReady() && realtime.isReady()) {
models.Revision.checkAllNotesRevision(function (err, notes) {
if (err) throw new Error(err);
if (!notes || notes.length <= 0) {
clearInterval(checkCleanTimer);
return process.exit(0);
}
});
}
}, 100);
});
| app.js | //app
//external modules
var express = require('express');
var toobusy = require('toobusy-js');
var ejs = require('ejs');
var passport = require('passport');
var methodOverride = require('method-override');
var cookieParser = require('cookie-parser');
var bodyParser = require('body-parser');
var compression = require('compression');
var session = require('express-session');
var SequelizeStore = require('connect-session-sequelize')(session.Store);
var fs = require('fs');
var imgur = require('imgur');
var formidable = require('formidable');
var morgan = require('morgan');
var passportSocketIo = require("passport.socketio");
var helmet = require('helmet');
var i18n = require('i18n');
//core
var config = require("./lib/config.js");
var logger = require("./lib/logger.js");
var auth = require("./lib/auth.js");
var history = require("./lib/history.js");
var response = require("./lib/response.js");
var models = require("./lib/models");
//server setup
if (config.usessl) {
var ca = (function () {
var i, len, results;
results = [];
for (i = 0, len = config.sslcapath.length; i < len; i++) {
results.push(fs.readFileSync(config.sslcapath[i], 'utf8'));
}
return results;
})();
var options = {
key: fs.readFileSync(config.sslkeypath, 'utf8'),
cert: fs.readFileSync(config.sslcertpath, 'utf8'),
ca: ca,
dhparam: fs.readFileSync(config.dhparampath, 'utf8'),
requestCert: false,
rejectUnauthorized: false
};
var app = express();
var server = require('https').createServer(options, app);
} else {
var app = express();
var server = require('http').createServer(app);
}
//logger
app.use(morgan('combined', {
"stream": logger.stream
}));
//socket io
var io = require('socket.io')(server);
//others
var realtime = require("./lib/realtime.js");
//assign socket io to realtime
realtime.io = io;
//methodOverride
app.use(methodOverride('_method'));
// create application/json parser
var jsonParser = bodyParser.json({
limit: 1024 * 1024 * 10 // 10 mb
});
// create application/x-www-form-urlencoded parser
var urlencodedParser = bodyParser.urlencoded({
extended: false,
limit: 1024 * 1024 * 10 // 10 mb
});
//session store
var sessionStore = new SequelizeStore({
db: models.sequelize
});
//compression
app.use(compression());
// use hsts to tell https users stick to this
app.use(helmet.hsts({
maxAge: 31536000 * 1000, // 365 days
includeSubdomains: true,
preload: true
}));
i18n.configure({
locales: ['en', 'zh', 'fr', 'de', 'ja', 'es', 'el', 'pt', 'it', 'tr', 'ru', 'nl', 'hr', 'pl', 'uk', 'hi', 'sv'],
cookie: 'locale',
directory: __dirname + '/locales'
});
app.use(cookieParser());
app.use(i18n.init);
// routes without sessions
// static files
app.use('/', express.static(__dirname + '/public', { maxAge: config.staticcachetime }));
app.use('/vendor/', express.static(__dirname + '/bower_components', { maxAge: config.staticcachetime }));
//session
app.use(session({
name: config.sessionname,
secret: config.sessionsecret,
resave: false, //don't save session if unmodified
saveUninitialized: true, //always create session to ensure the origin
rolling: true, // reset maxAge on every response
cookie: {
maxAge: config.sessionlife
},
store: sessionStore
}));
// session resumption
var tlsSessionStore = {};
server.on('newSession', function (id, data, cb) {
tlsSessionStore[id.toString('hex')] = data;
cb();
});
server.on('resumeSession', function (id, cb) {
cb(null, tlsSessionStore[id.toString('hex')] || null);
});
//middleware which blocks requests when we're too busy
app.use(function (req, res, next) {
if (toobusy()) {
response.errorServiceUnavailable(res);
} else {
next();
}
});
//passport
app.use(passport.initialize());
app.use(passport.session());
//serialize and deserialize
passport.serializeUser(function (user, done) {
logger.info('serializeUser: ' + user.id);
return done(null, user.id);
});
passport.deserializeUser(function (id, done) {
models.User.findOne({
where: {
id: id
}
}).then(function (user) {
logger.info('deserializeUser: ' + user.id);
return done(null, user);
}).catch(function (err) {
logger.error(err);
return done(err, null);
});
});
// redirect url with trailing slashes
app.use(function(req, res, next) {
if ("GET" == req.method && req.path.substr(-1) == '/' && req.path.length > 1) {
var query = req.url.slice(req.path.length);
res.redirect(301, config.serverurl + req.path.slice(0, -1) + query);
} else {
next();
}
});
// routes need sessions
//template files
app.set('views', __dirname + '/public/views');
//set render engine
app.engine('ejs', ejs.renderFile);
//set view engine
app.set('view engine', 'ejs');
//get index
app.get("/", response.showIndex);
//get 403 forbidden
app.get("/403", function (req, res) {
response.errorForbidden(res);
});
//get 404 not found
app.get("/404", function (req, res) {
response.errorNotFound(res);
});
//get 500 internal error
app.get("/500", function (req, res) {
response.errorInternalError(res);
});
//get status
app.get("/status", function (req, res, next) {
realtime.getStatus(function (data) {
res.set({
'Cache-Control': 'private', // only cache by client
'X-Robots-Tag': 'noindex, nofollow' // prevent crawling
});
res.send(data);
});
});
//get status
app.get("/temp", function (req, res) {
var host = req.get('host');
if (config.alloworigin.indexOf(host) == -1)
response.errorForbidden(res);
else {
var tempid = req.query.tempid;
if (!tempid)
response.errorForbidden(res);
else {
models.Temp.findOne({
where: {
id: tempid
}
}).then(function (temp) {
if (!temp)
response.errorNotFound(res);
else {
res.header("Access-Control-Allow-Origin", "*");
res.send({
temp: temp.data
});
temp.destroy().catch(function (err) {
if (err)
logger.error('remove temp failed: ' + err);
});
}
}).catch(function (err) {
logger.error(err);
return response.errorInternalError(res);
});
}
}
});
//post status
app.post("/temp", urlencodedParser, function (req, res) {
var host = req.get('host');
if (config.alloworigin.indexOf(host) == -1)
response.errorForbidden(res);
else {
var data = req.body.data;
if (!data)
response.errorForbidden(res);
else {
if (config.debug)
logger.info('SERVER received temp from [' + host + ']: ' + req.body.data);
models.Temp.create({
data: data
}).then(function (temp) {
if (temp) {
res.header("Access-Control-Allow-Origin", "*");
res.send({
status: 'ok',
id: temp.id
});
} else
response.errorInternalError(res);
}).catch(function (err) {
logger.error(err);
return response.errorInternalError(res);
});
}
}
});
function setReturnToFromReferer(req) {
var referer = req.get('referer');
if (!req.session) req.session = {};
req.session.returnTo = referer;
}
//facebook auth
if (config.facebook) {
app.get('/auth/facebook', function (req, res, next) {
setReturnToFromReferer(req);
passport.authenticate('facebook')(req, res, next);
});
//facebook auth callback
app.get('/auth/facebook/callback',
passport.authenticate('facebook', {
successReturnToOrRedirect: config.serverurl + '/',
failureRedirect: config.serverurl + '/'
}));
}
//twitter auth
if (config.twitter) {
app.get('/auth/twitter', function (req, res, next) {
setReturnToFromReferer(req);
passport.authenticate('twitter')(req, res, next);
});
//twitter auth callback
app.get('/auth/twitter/callback',
passport.authenticate('twitter', {
successReturnToOrRedirect: config.serverurl + '/',
failureRedirect: config.serverurl + '/'
}));
}
//github auth
if (config.github) {
app.get('/auth/github', function (req, res, next) {
setReturnToFromReferer(req);
passport.authenticate('github')(req, res, next);
});
//github auth callback
app.get('/auth/github/callback',
passport.authenticate('github', {
successReturnToOrRedirect: config.serverurl + '/',
failureRedirect: config.serverurl + '/'
}));
//github callback actions
app.get('/auth/github/callback/:noteId/:action', response.githubActions);
}
//gitlab auth
if (config.gitlab) {
app.get('/auth/gitlab', function (req, res, next) {
setReturnToFromReferer(req);
passport.authenticate('gitlab')(req, res, next);
});
//gitlab auth callback
app.get('/auth/gitlab/callback',
passport.authenticate('gitlab', {
successReturnToOrRedirect: config.serverurl + '/',
failureRedirect: config.serverurl + '/'
}));
//gitlab callback actions
app.get('/auth/gitlab/callback/:noteId/:action', response.gitlabActions);
}
//dropbox auth
if (config.dropbox) {
app.get('/auth/dropbox', function (req, res, next) {
setReturnToFromReferer(req);
passport.authenticate('dropbox-oauth2')(req, res, next);
});
//dropbox auth callback
app.get('/auth/dropbox/callback',
passport.authenticate('dropbox-oauth2', {
successReturnToOrRedirect: config.serverurl + '/',
failureRedirect: config.serverurl + '/'
}));
}
//google auth
if (config.google) {
app.get('/auth/google', function (req, res, next) {
setReturnToFromReferer(req);
passport.authenticate('google', { scope: ['profile'] })(req, res, next);
});
//google auth callback
app.get('/auth/google/callback',
passport.authenticate('google', {
successReturnToOrRedirect: config.serverurl + '/',
failureRedirect: config.serverurl + '/'
}));
}
//logout
app.get('/logout', function (req, res) {
if (config.debug && req.isAuthenticated())
logger.info('user logout: ' + req.user.id);
req.logout();
res.redirect(config.serverurl + '/');
});
//get history
app.get('/history', history.historyGet);
//post history
app.post('/history', urlencodedParser, history.historyPost);
//post history by note id
app.post('/history/:noteId', urlencodedParser, history.historyPost);
//delete history
app.delete('/history', history.historyDelete);
//delete history by note id
app.delete('/history/:noteId', history.historyDelete);
//get me info
app.get('/me', function (req, res) {
if (req.isAuthenticated()) {
models.User.findOne({
where: {
id: req.user.id
}
}).then(function (user) {
if (!user)
return response.errorNotFound(res);
var profile = models.User.parseProfile(user.profile);
res.send({
status: 'ok',
id: req.user.id,
name: profile.name,
photo: profile.photo
});
}).catch(function (err) {
logger.error('read me failed: ' + err);
return response.errorInternalError(res);
});
} else {
res.send({
status: 'forbidden'
});
}
});
//upload to imgur
app.post('/uploadimage', function (req, res) {
var form = new formidable.IncomingForm();
if (config.imageUploadType === 'filesystem') {
form.uploadDir = "public/uploads";
form.keepExtensions = true;
}
form.parse(req, function (err, fields, files) {
if (err || !files.image || !files.image.path) {
response.errorForbidden(res);
} else {
if (config.debug)
logger.info('SERVER received uploadimage: ' + JSON.stringify(files.image));
try {
switch (config.imageUploadType) {
case 'filesystem':
res.send({
link: files.image.path.match(/^public(.+$)/)[1]
});
break;
case 'imgur':
default:
imgur.setClientId(config.imgur.clientID);
imgur.uploadFile(files.image.path)
.then(function (json) {
if (config.debug)
logger.info('SERVER uploadimage success: ' + JSON.stringify(json));
res.send({
link: json.data.link.replace(/^http:\/\//i, 'https://')
});
})
.catch(function (err) {
logger.error(err);
return res.status(500).end('upload image error');
});
break;
}
} catch (err) {
logger.error(err);
return res.status(500).end('upload image error');
}
}
});
});
//get new note
app.get("/new", response.newNote);
//get publish note
app.get("/s/:shortid", response.showPublishNote);
//publish note actions
app.get("/s/:shortid/:action", response.publishNoteActions);
//get publish slide
app.get("/p/:shortid", response.showPublishSlide);
//publish slide actions
app.get("/p/:shortid/:action", response.publishSlideActions);
//get note by id
app.get("/:noteId", response.showNote);
//note actions
app.get("/:noteId/:action", response.noteActions);
//note actions with action id
app.get("/:noteId/:action/:actionId", response.noteActions);
// response not found if no any route matches
app.get('*', function (req, res) {
response.errorNotFound(res);
});
//socket.io secure
io.use(realtime.secure);
//socket.io auth
io.use(passportSocketIo.authorize({
cookieParser: cookieParser,
key: config.sessionname,
secret: config.sessionsecret,
store: sessionStore,
success: realtime.onAuthorizeSuccess,
fail: realtime.onAuthorizeFail
}));
//socket.io heartbeat
io.set('heartbeat interval', config.heartbeatinterval);
io.set('heartbeat timeout', config.heartbeattimeout);
//socket.io connection
io.sockets.on('connection', realtime.connection);
//listen
function startListen() {
server.listen(config.port, function () {
var schema = config.usessl ? 'HTTPS' : 'HTTP';
logger.info('%s Server listening at port %d', schema, config.port);
config.maintenance = false;
});
}
// sync db then start listen
models.sequelize.sync().then(function () {
// check if realtime is ready
if (history.isReady() && realtime.isReady()) {
models.Revision.checkAllNotesRevision(function (err, notes) {
if (err) throw new Error(err);
if (!notes || notes.length <= 0) return startListen();
});
} else {
throw new Error('server still not ready after db synced');
}
});
// log uncaught exception
process.on('uncaughtException', function (err) {
    logger.error('An uncaught exception has occurred.');
logger.error(err);
logger.error('Process will exit now.');
process.exit(1);
});
// gracefully exit
process.on('SIGINT', function () {
config.maintenance = true;
// disconnect all socket.io clients
Object.keys(io.sockets.sockets).forEach(function (key) {
var socket = io.sockets.sockets[key];
// notify client server going into maintenance status
socket.emit('maintenance');
setTimeout(function () {
socket.disconnect(true);
}, 0);
});
var checkCleanTimer = setInterval(function () {
if (history.isReady() && realtime.isReady()) {
models.Revision.checkAllNotesRevision(function (err, notes) {
if (err) throw new Error(err);
if (!notes || notes.length <= 0) {
clearInterval(checkCleanTimer);
return process.exit(0);
}
});
}
}, 100);
});
| Join image path with config.serverurl
| app.js | Join image path with config.serverurl | <ide><path>pp.js
<ide> try {
<ide> switch (config.imageUploadType) {
<ide> case 'filesystem':
<add> var path = require('path');
<add>
<ide> res.send({
<del> link: files.image.path.match(/^public(.+$)/)[1]
<add> link: path.join(config.serverurl, files.image.path.match(/^public(.+$)/)[1])
<ide> });
<ide>
<ide> break; |
|
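The diff above builds the image link with Node's path.join. A minimal sketch of that behavior, assuming config.serverurl is a root-relative prefix such as '/hackmd' (hypothetical value here); note path.join is a filesystem-path helper, so on a fully qualified URL it would collapse the '//':

// Sketch only (Node.js, POSIX path semantics); values are hypothetical.
var path = require('path');

var config = { serverurl: '/hackmd' };                  // assumed root-relative prefix
var uploaded = 'public/uploads/upload_abc123.png';      // hypothetical formidable upload path

// Mirrors the diff: strip the leading "public" and prepend the server URL.
var link = path.join(config.serverurl, uploaded.match(/^public(.+$)/)[1]);
console.log(link); // -> /hackmd/uploads/upload_abc123.png

// Caveat: with a full URL, path.join collapses the double slash.
console.log(path.join('https://example.com', '/uploads/x.png'));
// -> 'https:/example.com/uploads/x.png'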
JavaScript | agpl-3.0 | 0e9b509122a790db61eae6041f0462fb4b96d7c6 | 0 | jakebailey/PrairieLearn,mwest1066/PrairieLearn,tbretl/PrairieLearn,mwest1066/PrairieLearn,mwest1066/PrairieLearn,tbretl/PrairieLearn,tbretl/PrairieLearn,mwest1066/PrairieLearn,jakebailey/PrairieLearn,parasgithub/PrairieLearn,jakebailey/PrairieLearn,rbessick5/PrairieLearn,rbessick5/PrairieLearn,parasgithub/PrairieLearn,parasgithub/PrairieLearn,parasgithub/PrairieLearn,rbessick5/PrairieLearn,rbessick5/PrairieLearn,jakebailey/PrairieLearn,tbretl/PrairieLearn,rbessick5/PrairieLearn,mwest1066/PrairieLearn,tbretl/PrairieLearn,parasgithub/PrairieLearn | var ERR = require('async-stacktrace');
var _ = require('lodash');
var async = require('async');
var ejs = require('ejs');
var path = require('path');
var debug = require('debug')('prairielearn:' + path.basename(__filename, '.js'));
var error = require('../lib/error');
var logger = require('../lib/logger');
var sqldb = require('../lib/sqldb');
var question = require('../lib/question');
var externalGradingSocket = require('../lib/external-grading-socket');
/**
* Assessment module.
* @module assessment
*/
module.exports = {
/**
* Render the "text" property of an assessment.
*
* @param {Object} assessment - The assessment to render the text for.
* @param {string} urlPrefix - The current server urlPrefix.
* @param {function} callback - A callback(err, html) function.
*/
renderText(assessment, urlPrefix, callback) {
if (!assessment.text) return callback(null, null);
var context = {
clientFilesCourse: urlPrefix + '/clientFilesCourse',
clientFilesCourseInstance: urlPrefix + '/clientFilesCourseInstance',
clientFilesAssessment: urlPrefix + '/assessment/' + assessment.id + '/clientFilesAssessment',
};
var assessment_text_templated;
try {
assessment_text_templated = ejs.render(assessment.text, context);
} catch (e) {
return ERR(e, callback);
}
callback(null, assessment_text_templated);
},
/*
* Create a new assessment instance and all the questions in it.
*
* @param {number} assessment_id - The assessment to create the assessment instance for.
* @param {number} user_id - The user who will own the new assessment instance.
* @param {number} authn_user_id - The current authenticated user.
* @param {string} mode - The mode for the new assessment instance.
* @param {?number} time_limit_min - The time limit for the new assessment instance.
* @param {Date} date - The date of creation for the new assessment instance.
* @param {Object} course - The course for the new assessment instance.
* @param {function} callback - A callback(err, assessment_instance_id) function.
*/
makeAssessmentInstance(assessment_id, user_id, authn_user_id, mode, time_limit_min, date, course, callback) {
sqldb.beginTransaction((err, client, done) => {
if (ERR(err, callback)) return;
var assessment_instance_id, new_instance_question_ids;
async.series([
(callback) => {
var params = [
assessment_id,
user_id,
authn_user_id,
mode,
time_limit_min,
date,
];
sqldb.callWithClientOneRow(client, 'assessment_instances_insert', params, (err, result) => {
if (ERR(err, callback)) return;
assessment_instance_id = result.rows[0].assessment_instance_id;
new_instance_question_ids = result.rows[0].new_instance_question_ids;
callback(null);
});
},
(callback) => {
async.each(new_instance_question_ids, (instance_question_id, callback) => {
const question_id = null; // use instance_question_id to determine the question
const options = {};
const require_open = true;
question._ensureVariantWithClient(client, question_id, instance_question_id, user_id, authn_user_id, course, options, require_open, (err, _variant) => {
if (ERR(err, callback)) return;
callback(null);
});
}, (err) => {
if (ERR(err, callback)) return;
callback(null);
});
},
], (err) => {
sqldb.endTransaction(client, done, err, (err) => {
if (ERR(err, callback)) return;
callback(null, assessment_instance_id);
});
});
});
},
/*
* Add new questions to the assessment instance and regrade it if necessary.
*
* @param {number} assessment_instance_id - The assessment instance to grade.
* @param {number} authn_user_id - The current authenticated user.
* @param {function} callback - A callback(err, updated) function.
*/
update(assessment_instance_id, authn_user_id, callback) {
debug('update()');
let updated;
sqldb.beginTransaction((err, client, done) => {
if (ERR(err, callback)) return;
debug('inside transaction');
async.series([
(callback) => {
sqldb.callWithClient(client, 'assessment_instances_lock', [assessment_instance_id], (err) => {
if (ERR(err, callback)) return;
debug('locked');
callback(null);
});
},
(callback) => {
const params = [
assessment_instance_id,
authn_user_id,
];
sqldb.callWithClientOneRow(client, 'assessment_instances_update', params, (err, result) => {
if (ERR(err, callback)) return;
updated = result.rows[0].updated;
debug('updated:', updated);
callback(null);
});
},
(callback) => {
if (!updated) return callback(null); // skip if not updated
// if updated, regrade to pick up max_points changes, etc.
const params = [
assessment_instance_id,
authn_user_id,
null, // credit
true, // only_log_if_score_updated
];
sqldb.callWithClientOneRow(client, 'assessment_instances_grade', params, (err, _result) => {
if (ERR(err, callback)) return;
debug('graded');
callback(null);
});
},
], (err) => {
sqldb.endTransaction(client, done, err, (err) => {
if (ERR(err, callback)) return;
debug('transaction ended');
callback(null, updated);
});
});
});
},
/*
     * Internal worker for gradeAssessmentInstance(). Do not call directly.
* @protected
*
* @param {Object} client - SQL client that must be inside a locked transaction.
* @param {number} assessment_instance_id - The assessment instance to grade.
* @param {number} authn_user_id - The current authenticated user.
* @param {boolean} close - Whether to close the assessment instance after grading.
* @param {function} callback - A callback(err) function.
*/
_gradeAssessmentInstanceWithClient(client, assessment_instance_id, authn_user_id, close, callback) {
debug('_gradeAssessmentInstanceWithClient()');
let rows;
async.series([
(callback) => {
sqldb.callWithClient(client, 'variants_select_for_assessment_instance_grading', [assessment_instance_id], (err, result) => {
if (ERR(err, callback)) return;
rows = result.rows;
debug('_gradeAssessmentInstanceWithClient()', 'selected variants', 'count:', rows.length);
callback(null);
});
},
(callback) => {
async.eachSeries(rows, (row, callback) => {
debug('_gradeAssessmentInstanceWithClient()', 'loop', 'variant.id:', row.variant.id);
const check_submission_id = null;
question._gradeVariantWithClient(client, row.variant, check_submission_id, row.question, row.course, authn_user_id, (err) => {
if (ERR(err, callback)) return;
callback(null);
});
}, (err) => {
if (ERR(err, callback)) return;
debug('_gradeAssessmentInstanceWithClient()', 'finished grading');
callback(null);
});
},
(callback) => {
if (!close) return callback(null);
sqldb.callWithClient(client, 'assessment_instances_close', [assessment_instance_id, authn_user_id], (err) => {
if (ERR(err, callback)) return;
callback(null);
});
},
], (err) => {
if (ERR(err, callback)) return;
debug('_gradeAssessmentInstanceWithClient()', 'success');
callback(null);
});
},
/**
* Grade all questions in an assessment instance and (optionally) close it.
*
* @param {number} assessment_instance_id - The assessment instance to grade.
* @param {number} authn_user_id - The current authenticated user.
* @param {boolean} close - Whether to close the assessment instance after grading.
* @param {function} callback - A callback(err) function.
*/
gradeAssessmentInstance(assessment_instance_id, authn_user_id, close, callback) {
debug('gradeAssessmentInstance()');
sqldb.beginTransaction((err, client, done) => {
if (ERR(err, callback)) return;
async.series([
(callback) => {
sqldb.callWithClient(client, 'assessment_instances_lock', [assessment_instance_id], (err) => {
if (ERR(err, callback)) return;
debug('gradeAssessmentInstance()', 'locked');
callback(null);
});
},
(callback) => {
this._gradeAssessmentInstanceWithClient(client, assessment_instance_id, authn_user_id, close, (err) => {
if (ERR(err, callback)) return;
debug('gradeAssessmentInstance()', 'finished _gradeAssessmentInstanceWithClient()');
callback(null);
});
},
], (err) => {
sqldb.endTransaction(client, done, err, (err) => {
if (ERR(err, callback)) return;
debug('gradeAssessmentInstance()', 'success');
callback(null);
});
});
});
},
/**
* Generates an object that can be passed to assessment.processGradingResult.
* This function can be passed a parsed results object, or it can be passed a
* string or buffer to attempt to parse it and mark the grading job as failed when
* parsing fails.
*
* @param {Object|string|Buffer} data - The grading results
*/
makeGradingResult(data) {
if (typeof data === 'string' || Buffer.isBuffer(data)) {
try {
data = JSON.parse(data);
} catch (e) {
return {
gradingId: data.job_id,
grading: {
score: 0,
startTime: null,
endTime: null,
feedback: {
succeeded: false,
},
},
};
}
}
if (!data.succeeded) {
return {
gradingId: data.job_id,
grading: {
startTime: data.start_time || null,
endTime: data.end_time || null,
score: 0,
feedback: data
}
};
}
// TODO: once we have better error handling in place, account for these errors
/*
if (!data.results) {
return callback(new Error('results.json did not contain \'results\' object.'));
}
if (typeof data.results.score !== 'number' || Number.isNaN(data.results.score)) {
return callback(new Error('Score did not exist or is not a number!'));
}
*/
let score = 0.0;
if (data.results && typeof data.results.score === 'number' && !Number.isNaN(data.results.score)) {
score = data.results.score;
}
return {
gradingId: data.job_id,
grading: {
startTime: data.start_time,
endTime: data.end_time,
score: score,
feedback: data
}
};
},
/**
* Process the result of an external grading job.
*
     * @param {Object} content - The grading job data to process.
*/
processGradingResult(content) {
async.series([
(callback) => {
if (!_(content.grading).isObject()) {
return callback(error.makeWithData('invalid grading', {content: content}));
}
if (!_(content.grading.score).isNumber()) {
return callback(error.makeWithData('invalid grading.score', {content: content}));
}
if (content.grading.score < 0 || content.grading.score > 1) {
return callback(error.makeWithData('grading.score out of range', {content: content}));
}
if (_(content.grading).has('feedback') && !_(content.grading.feedback).isObject()) {
return callback(error.makeWithData('invalid grading.feedback', {content: content}));
}
const params = [
content.gradingId,
content.grading.score,
content.grading.feedback,
content.grading.startTime,
content.grading.endTime,
];
sqldb.call('grading_jobs_process_external', params, (err) => {
if (ERR(err, callback)) return;
callback(null);
});
},
], (err) => {
if (ERR(err, () => {})) {
// FIXME: call sprocs/errors_insert here
logger.error('processGradingResult: error',
{message: err.message, stack: err.stack, data: JSON.stringify(err.data)});
}
externalGradingSocket.gradingLogStatusUpdated(content.gradingId);
});
},
};
| lib/assessment.js | var ERR = require('async-stacktrace');
var _ = require('lodash');
var async = require('async');
var ejs = require('ejs');
var path = require('path');
var debug = require('debug')('prairielearn:' + path.basename(__filename, '.js'));
var error = require('../lib/error');
var logger = require('../lib/logger');
var sqldb = require('../lib/sqldb');
var question = require('../lib/question');
var externalGradingSocket = require('../lib/external-grading-socket');
/**
* Assessment module.
* @module assessment
*/
module.exports = {
/**
* Render the "text" property of an assessment.
*
* @param {Object} assessment - The assessment to render the text for.
* @param {string} urlPrefix - The current server urlPrefix.
* @param {function} callback - A callback(err, html) function.
*/
renderText(assessment, urlPrefix, callback) {
if (!assessment.text) return callback(null, null);
var context = {
clientFilesCourse: urlPrefix + '/clientFilesCourse',
clientFilesCourseInstance: urlPrefix + '/clientFilesCourseInstance',
clientFilesAssessment: urlPrefix + '/assessment/' + assessment.id + '/clientFilesAssessment',
};
var assessment_text_templated;
try {
assessment_text_templated = ejs.render(assessment.text, context);
} catch (e) {
return ERR(e, callback);
}
callback(null, assessment_text_templated);
},
/*
* Create a new assessment instance and all the questions in it.
*
* @param {number} assessment_id - The assessment to create the assessment instance for.
* @param {number} user_id - The user who will own the new assessment instance.
* @param {number} authn_user_id - The current authenticated user.
* @param {string} mode - The mode for the new assessment instance.
* @param {?number} time_limit_min - The time limit for the new assessment instance.
* @param {Date} date - The date of creation for the new assessment instance.
* @param {Object} course - The course for the new assessment instance.
* @param {function} callback - A callback(err, assessment_instance_id) function.
*/
makeAssessmentInstance(assessment_id, user_id, authn_user_id, mode, time_limit_min, date, course, callback) {
sqldb.beginTransaction((err, client, done) => {
if (ERR(err, callback)) return;
var assessment_instance_id, new_instance_question_ids;
async.series([
(callback) => {
var params = [
assessment_id,
user_id,
authn_user_id,
mode,
time_limit_min,
date,
];
sqldb.callWithClientOneRow(client, 'assessment_instances_insert', params, (err, result) => {
if (ERR(err, callback)) return;
assessment_instance_id = result.rows[0].assessment_instance_id;
new_instance_question_ids = result.rows[0].new_instance_question_ids;
callback(null);
});
},
(callback) => {
async.each(new_instance_question_ids, (instance_question_id, callback) => {
const question_id = null; // use instance_question_id to determine the question
const options = {};
const require_open = true;
question._ensureVariantWithClient(client, question_id, instance_question_id, user_id, authn_user_id, course, options, require_open, (err, _variant) => {
if (ERR(err, callback)) return;
callback(null);
});
}, (err) => {
if (ERR(err, callback)) return;
callback(null);
});
},
], (err) => {
sqldb.endTransaction(client, done, err, (err) => {
if (ERR(err, callback)) return;
callback(null, assessment_instance_id);
});
});
});
},
/*
* Add new questions to the assessment instance and regrade it if necessary.
*
* @param {number} assessment_instance_id - The assessment instance to grade.
* @param {number} authn_user_id - The current authenticated user.
* @param {function} callback - A callback(err, updated) function.
*/
update(assessment_instance_id, authn_user_id, callback) {
debug('update()');
let updated;
sqldb.beginTransaction((err, client, done) => {
if (ERR(err, callback)) return;
debug('inside transaction');
async.series([
(callback) => {
sqldb.callWithClient(client, 'assessment_instances_lock', [assessment_instance_id], (err) => {
if (ERR(err, callback)) return;
debug('locked');
callback(null);
});
},
(callback) => {
const params = [
assessment_instance_id,
authn_user_id,
];
sqldb.callWithClientOneRow(client, 'assessment_instances_update', params, (err, result) => {
if (ERR(err, callback)) return;
updated = result.rows[0].updated;
debug('updated:', updated);
callback(null);
});
},
(callback) => {
if (!updated) return callback(null); // skip if not updated
// if updated, regrade to pick up max_points changes, etc.
const params = [
assessment_instance_id,
authn_user_id,
null, // credit
true, // only_log_if_score_updated
];
sqldb.callWithClientOneRow(client, 'assessment_instances_grade', params, (err, _result) => {
if (ERR(err, callback)) return;
debug('graded');
callback(null);
});
},
], (err) => {
sqldb.endTransaction(client, done, err, (err) => {
if (ERR(err, callback)) return;
debug('transaction ended');
callback(null, updated);
});
});
});
},
/*
     * Internal worker for gradeAssessmentInstance(). Do not call directly.
* @protected
*
* @param {Object} client - SQL client that must be inside a locked transaction.
* @param {number} assessment_instance_id - The assessment instance to grade.
* @param {number} authn_user_id - The current authenticated user.
* @param {boolean} close - Whether to close the assessment instance after grading.
* @param {function} callback - A callback(err) function.
*/
_gradeAssessmentInstanceWithClient(client, assessment_instance_id, authn_user_id, close, callback) {
debug('_gradeAssessmentInstanceWithClient()');
let rows;
async.series([
(callback) => {
sqldb.callWithClient(client, 'variants_select_for_assessment_instance_grading', [assessment_instance_id], (err, result) => {
if (ERR(err, callback)) return;
rows = result.rows;
debug('_gradeAssessmentInstanceWithClient()', 'selected variants', 'count:', rows.length);
callback(null);
});
},
(callback) => {
async.eachSeries(rows, (row, callback) => {
debug('_gradeAssessmentInstanceWithClient()', 'loop', 'variant.id:', row.variant.id);
const check_submission_id = null;
question._gradeVariantWithClient(client, row.variant, check_submission_id, row.question, row.course, authn_user_id, (err) => {
if (ERR(err, callback)) return;
callback(null);
});
}, (err) => {
if (ERR(err, callback)) return;
debug('_gradeAssessmentInstanceWithClient()', 'finished grading');
callback(null);
});
},
(callback) => {
if (!close) return callback(null);
sqldb.callWithClient(client, 'assessment_instances_close', [assessment_instance_id, authn_user_id], (err) => {
if (ERR(err, callback)) return;
callback(null);
});
},
], (err) => {
if (ERR(err, callback)) return;
debug('_gradeAssessmentInstanceWithClient()', 'success');
callback(null);
});
},
/**
* Grade all questions in an assessment instance and (optionally) close it.
*
* @param {number} assessment_instance_id - The assessment instance to grade.
* @param {number} authn_user_id - The current authenticated user.
* @param {boolean} close - Whether to close the assessment instance after grading.
* @param {function} callback - A callback(err) function.
*/
gradeAssessmentInstance(assessment_instance_id, authn_user_id, close, callback) {
debug('gradeAssessmentInstance()');
sqldb.beginTransaction((err, client, done) => {
if (ERR(err, callback)) return;
async.series([
(callback) => {
sqldb.callWithClient(client, 'assessment_instances_lock', [assessment_instance_id], (err) => {
if (ERR(err, callback)) return;
debug('gradeAssessmentInstance()', 'locked');
callback(null);
});
},
(callback) => {
this._gradeAssessmentInstanceWithClient(client, assessment_instance_id, authn_user_id, close, (err) => {
if (ERR(err, callback)) return;
debug('gradeAssessmentInstance()', 'finished _gradeAssessmentInstanceWithClient()');
callback(null);
});
},
], (err) => {
sqldb.endTransaction(client, done, err, (err) => {
if (ERR(err, callback)) return;
debug('gradeAssessmentInstance()', 'success');
callback(null);
});
});
});
},
/**
* Generates an object that can be passed to assessment.processGradingResult.
* This function can be passed a parsed results object, or it can be passed a
* string or buffer to attempt to parse it and mark the grading job as failed when
* parsing fails.
*
* @param {Object|string} data - The grading results
*/
makeGradingResult(data) {
if (typeof data === 'string' || Buffer.isBuffer(data)) {
try {
data = JSON.parse(data);
} catch (e) {
return {
gradingId: data.job_id,
grading: {
score: 0,
startTime: null,
endTime: null,
feedback: {
succeeded: false,
},
},
};
}
}
if (!data.succeeded) {
return {
gradingId: data.job_id,
grading: {
startTime: data.start_time || null,
endTime: data.end_time || null,
score: 0,
feedback: data
}
};
}
// TODO: once we have better error handling in place, account for these errors
/*
if (!data.results) {
return callback(new Error('results.json did not contain \'results\' object.'));
}
if (typeof data.results.score !== 'number' || Number.isNaN(data.results.score)) {
return callback(new Error('Score did not exist or is not a number!'));
}
*/
let score = 0.0;
if (data.results && typeof data.results.score === 'number' && !Number.isNaN(data.results.score)) {
score = data.results.score;
}
return {
gradingId: data.job_id,
grading: {
startTime: data.start_time,
endTime: data.end_time,
score: score,
feedback: data
}
};
},
/**
* Process the result of an external grading job.
*
     * @param {Object} content - The grading job data to process.
*/
processGradingResult(content) {
console.log(content);
async.series([
(callback) => {
if (!_(content.grading).isObject()) {
return callback(error.makeWithData('invalid grading', {content: content}));
}
if (!_(content.grading.score).isNumber()) {
return callback(error.makeWithData('invalid grading.score', {content: content}));
}
if (content.grading.score < 0 || content.grading.score > 1) {
return callback(error.makeWithData('grading.score out of range', {content: content}));
}
if (_(content.grading).has('feedback') && !_(content.grading.feedback).isObject()) {
return callback(error.makeWithData('invalid grading.feedback', {content: content}));
}
const params = [
content.gradingId,
content.grading.score,
content.grading.feedback,
content.grading.startTime,
content.grading.endTime,
];
sqldb.call('grading_jobs_process_external', params, (err) => {
if (ERR(err, callback)) return;
callback(null);
});
},
], (err) => {
if (ERR(err, () => {})) {
// FIXME: call sprocs/errors_insert here
logger.error('processGradingResult: error',
{message: err.message, stack: err.stack, data: JSON.stringify(err.data)});
}
externalGradingSocket.gradingLogStatusUpdated(content.gradingId);
});
},
};
| Fix linter error; improve documentation
| lib/assessment.js | Fix linter error; improve documentation | <ide><path>ib/assessment.js
<ide> * string or buffer to attempt to parse it and mark the grading job as failed when
<ide> * parsing fails.
<ide> *
<del> * @param {Object|string} data - The grading results
<add> * @param {Object|string|Buffer} data - The grading results
<ide> */
<ide> makeGradingResult(data) {
<ide> if (typeof data === 'string' || Buffer.isBuffer(data)) {
<ide>      * @param {Object} content - The grading job data to process.
<ide> */
<ide> processGradingResult(content) {
<del> console.log(content);
<ide> async.series([
<ide> (callback) => {
<ide> if (!_(content.grading).isObject()) { |
|
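The commit above widens makeGradingResult's documented input to Object|string|Buffer. A self-contained sketch of that contract, paraphrased and simplified from this record (timestamps omitted, not imported from the module); note that in the catch branch, as in the original, data is still the unparsed string/Buffer, so gradingId comes out undefined there:

// Simplified sketch of the makeGradingResult() contract (Node.js).
function makeGradingResult(data) {
  if (typeof data === 'string' || Buffer.isBuffer(data)) {
    try {
      data = JSON.parse(data);
    } catch (e) {
      // data was never parsed, so data.job_id is undefined here
      return {
        gradingId: data.job_id,
        grading: { score: 0, startTime: null, endTime: null, feedback: { succeeded: false } },
      };
    }
  }
  if (!data.succeeded) {
    return { gradingId: data.job_id, grading: { score: 0, feedback: data } };
  }
  var score = 0.0;
  if (data.results && typeof data.results.score === 'number' && !Number.isNaN(data.results.score)) {
    score = data.results.score;
  }
  return { gradingId: data.job_id, grading: { score: score, feedback: data } };
}

// usage
console.log(makeGradingResult(Buffer.from('not json')).grading.feedback.succeeded); // false
console.log(makeGradingResult('{"job_id": 7, "succeeded": true, "results": {"score": 0.5}}').grading.score); // 0.5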
Java | bsd-3-clause | bb7bca057d83911a3afe5903f00adc68e62b8d0a | 0 | mdcao/japsa,mdcao/japsa | /*****************************************************************************
* Copyright (c) Minh Duc Cao, Monash Uni & UQ, All rights reserved. *
* *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* *
* 1. Redistributions of source code must retain the above copyright notice, *
* this list of conditions and the following disclaimer. *
* 2. Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* 3. Neither the names of the institutions nor the names of the contributors*
* may be used to endorse or promote products derived from this software *
* without specific prior written permission. *
* *
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS *
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, *
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR *
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR *
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, *
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, *
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR *
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF *
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING *
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS *
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. *
****************************************************************************/
/************************** REVISION HISTORY **************************
* 18/10/2013 - Minh Duc Cao: Created
*
****************************************************************************/
package japsadev.tools;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SAMRecordIterator;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SamReaderFactory;
import htsjdk.samtools.ValidationStringency;
import japsa.seq.Alphabet;
import japsa.seq.Sequence;
import japsa.seq.SequenceReader;
import japsa.util.CommandLine;
import japsa.util.deploy.Deployable;
import japsa.bio.hts.scaffold.AlignmentRecord;
import japsa.bio.hts.scaffold.Contig;
@Deployable(
scriptName = "jsa.dev.flankDetect",
scriptDesc = "Detect flanking sequences from both ends of nanopore reads"
)
public class FlankSeqsDetectorCmd extends CommandLine{
public FlankSeqsDetectorCmd(){
super();
Deployable annotation = getClass().getAnnotation(Deployable.class);
setUsage(annotation.scriptName() + " [options]");
setDesc(annotation.scriptDesc());
addString("flankFile",null,"Flank sequences file, maximum 2 sequences",true);
addString("bamFile",null,"Bam file",true);
addDouble("qual", 1, "Mininum quality");
addInt("insert", 10, "Minimum length of insert sequence in-between 2 flanking sequences");
addInt("tips", 20, "Maximum percentage of the overhangs compared to the corresponding flanking sequence");
addDouble("cover", 80, "Mininum percentage of flank sequence coverage for a valid alignment");
addStdHelp();
}
/**
* @param args
*/
public static void main(String[] args) throws IOException{
/*********************** Setting up script ****************************/
/*********************** Setting up script ****************************/
CommandLine cmdLine = new FlankSeqsDetectorCmd();
args = cmdLine.stdParseLine(args);
/**********************************************************************/
String flankSeqsFile= cmdLine.getStringVal("flankFile");
String bamFile = cmdLine.getStringVal("bamFile");
double qual = cmdLine.getDoubleVal("qual"),
flkCov = cmdLine.getDoubleVal("cover");
int insertLength = cmdLine.getIntVal("insert"),
tipsPercentage = cmdLine.getIntVal("tips");
SequenceReader seqReader = SequenceReader.getReader(flankSeqsFile);
Sequence seq;
ArrayList<Contig> flankSeqs = new ArrayList<>();
int index=0;
while ((seq = seqReader.nextSequence(Alphabet.DNA())) != null)
flankSeqs.add(new Contig(index++,seq));
seqReader.close();
if(flankSeqs.size() > 2){
System.err.println("More than 2 sequences!");
System.exit(1);
}
SamReaderFactory.setDefaultValidationStringency(ValidationStringency.SILENT);
SamReader reader = SamReaderFactory.makeDefault().open(new File(bamFile));
SAMRecordIterator iter = reader.iterator();
SAMRecord rec;
AlignmentRecord curAlnRec;
String curReadName = "";
HashMap<String, HashMap<Contig, AlignmentRecord>> map = new HashMap<>();
// HashMap<String, Sequence> readsMap = new HashMap<>();
while (iter.hasNext()) {
rec = iter.next();
curReadName=rec.getReadName();
// if(!readsMap.containsKey(curReadName)){
//
// }
if (rec.getReadUnmappedFlag() || rec.getMappingQuality() < qual){
if(!map.containsKey(curReadName))
map.put(curReadName, new HashMap<>());
continue;
}
Contig flk = flankSeqs.get(rec.getReferenceIndex());
curAlnRec=new AlignmentRecord(rec, flk);
if(curAlnRec.refEnd-curAlnRec.refStart < (double)flkCov*flk.length()/100.0)
continue;
//not too far from the tip of read
else if(Math.min(-curAlnRec.readAlignmentEnd()+curAlnRec.readLength, curAlnRec.readAlignmentStart()) > (double)flk.length()*tipsPercentage/100.0){
continue;
}
HashMap<Contig, AlignmentRecord> data;
if(!map.containsKey(curReadName) || map.get(curReadName).isEmpty()){
data=new HashMap<>();
data.put(flk, curAlnRec);
map.put(curReadName, data);
}else{
data=map.get(curReadName);
if(!data.containsKey(flk)){
data.put(flk, curAlnRec);
}else if(data.get(flk).score < curAlnRec.score){
data.replace(flk, curAlnRec);
}
}
}// while
iter.close();
/**********************************************************************/
int totReadNum = map.keySet().size();
ArrayList<String> flank0 = new ArrayList<String>(),
flank1_0 = new ArrayList<String>(),
flank1_1 = new ArrayList<String>(),
flank2 = new ArrayList<String>();
map.keySet().stream().forEach(r->{
if(map.get(r).isEmpty())
flank0.add(r);
else if(map.get(r).keySet().size()==1){
if(map.get(r).containsKey(flankSeqs.get(0)))
flank1_0.add(r);
else if(map.get(r).containsKey(flankSeqs.get(1)))
flank1_1.add(r);
}
else if(map.get(r).keySet().size()==2){
AlignmentRecord aln0 = map.get(r).get(flankSeqs.get(0)),
aln1 = map.get(r).get(flankSeqs.get(1));
if((aln0.readStart-aln1.readStart)*(aln0.readEnd-aln1.readStart) <= 0
|| (aln0.readStart-aln1.readEnd)*(aln0.readEnd-aln1.readEnd) <= 0 )
return;
				//the in-between must be longer than insertLength
if(aln0.readAlignmentEnd()-aln0.readAlignmentStart() + aln1.readAlignmentEnd()-aln1.readAlignmentStart() > aln1.readLength -insertLength)
return;
flank2.add(r);
}
});
System.out.println( "Total number of reads: " + totReadNum);
System.out.println("Number of reads with 0 flank sequences: " + flank0.size());
flank0.stream().forEach(r->System.out.println(r));
System.out.println("++++++++++++++++++++++++++++++++++++++++++++++++++++++++++");
System.out.printf("Number of reads with only 1 flank sequence %s: %d \n" , flankSeqs.get(0).getName(), flank1_0.size());
flank1_0.stream().forEach(r->System.out.println(r));
System.out.println("++++++++++++++++++++++++++++++++++++++++++++++++++++++++++");
System.out.printf("Number of reads with only 1 flank sequence %s: %d \n" , flankSeqs.get(1).getName(), flank1_1.size());
flank1_1.stream().forEach(r->System.out.println(r));
System.out.println("++++++++++++++++++++++++++++++++++++++++++++++++++++++++++");
System.out.println("Number of reads with 2 flank sequences: " + flank2.size());
flank2.stream().forEach(r->System.out.println(r));
System.out.println("++++++++++++++++++++++++++++++++++++++++++++++++++++++++++");
}
}
| src/dev/java/japsadev/tools/FlankSeqsDetectorCmd.java | /*****************************************************************************
* Copyright (c) Minh Duc Cao, Monash Uni & UQ, All rights reserved. *
* *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted provided that the following conditions *
* are met: *
* *
* 1. Redistributions of source code must retain the above copyright notice, *
* this list of conditions and the following disclaimer. *
* 2. Redistributions in binary form must reproduce the above copyright *
* notice, this list of conditions and the following disclaimer in the *
* documentation and/or other materials provided with the distribution. *
* 3. Neither the names of the institutions nor the names of the contributors*
* may be used to endorse or promote products derived from this software *
* without specific prior written permission. *
* *
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS *
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, *
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR *
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR *
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, *
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, *
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR *
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF *
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING *
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS *
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. *
****************************************************************************/
/************************** REVISION HISTORY **************************
* 18/10/2013 - Minh Duc Cao: Created
*
****************************************************************************/
package japsadev.tools;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SAMRecordIterator;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SamReaderFactory;
import htsjdk.samtools.ValidationStringency;
import japsa.seq.Alphabet;
import japsa.seq.Sequence;
import japsa.seq.SequenceReader;
import japsa.util.CommandLine;
import japsa.util.deploy.Deployable;
import japsa.bio.hts.scaffold.AlignmentRecord;
import japsa.bio.hts.scaffold.Contig;
@Deployable(
scriptName = "jsa.dev.flankDetect",
scriptDesc = "Detect flanking sequences from both ends of nanopore reads"
)
public class FlankSeqsDetectorCmd extends CommandLine{
public FlankSeqsDetectorCmd(){
super();
Deployable annotation = getClass().getAnnotation(Deployable.class);
setUsage(annotation.scriptName() + " [options]");
setDesc(annotation.scriptDesc());
addString("flankFile",null,"Flank sequences file, maximum 2 sequences",true);
addString("bamFile",null,"Bam file",true);
addDouble("qual", 1, "Mininum quality");
addInt("insert", 10, "Minimum length of insert sequence in-between 2 flanking sequences");
addInt("tips", 20, "Maximum percentage of the overhangs compared to the corresponding flanking sequence");
addStdHelp();
}
/**
* @param args
*/
public static void main(String[] args) throws IOException{
/*********************** Setting up script ****************************/
/*********************** Setting up script ****************************/
CommandLine cmdLine = new FlankSeqsDetectorCmd();
args = cmdLine.stdParseLine(args);
/**********************************************************************/
String flankSeqsFile= cmdLine.getStringVal("flankFile");
String bamFile = cmdLine.getStringVal("bamFile");
double qual = cmdLine.getDoubleVal("qual");
int insertLength = cmdLine.getIntVal("insert"),
tipsPercentage = cmdLine.getIntVal("tips");
SequenceReader seqReader = SequenceReader.getReader(flankSeqsFile);
Sequence seq;
ArrayList<Contig> flankSeqs = new ArrayList<>();
int index=0;
while ((seq = seqReader.nextSequence(Alphabet.DNA())) != null)
flankSeqs.add(new Contig(index++,seq));
seqReader.close();
if(flankSeqs.size() > 2){
System.err.println("More than 2 sequences!");
System.exit(1);
}
SamReaderFactory.setDefaultValidationStringency(ValidationStringency.SILENT);
SamReader reader = SamReaderFactory.makeDefault().open(new File(bamFile));
SAMRecordIterator iter = reader.iterator();
SAMRecord rec;
AlignmentRecord curAlnRec;
String curReadName = "";
HashMap<String, HashMap<Contig, AlignmentRecord>> map = new HashMap<>();
// HashMap<String, Sequence> readsMap = new HashMap<>();
while (iter.hasNext()) {
rec = iter.next();
curReadName=rec.getReadName();
// if(!readsMap.containsKey(curReadName)){
//
// }
if (rec.getReadUnmappedFlag() || rec.getMappingQuality() < qual){
if(!map.containsKey(curReadName))
map.put(curReadName, new HashMap<>());
continue;
}
Contig flk = flankSeqs.get(rec.getReferenceIndex());
curAlnRec=new AlignmentRecord(rec, flk);
if(curAlnRec.refEnd-curAlnRec.refStart < .5*flk.length())
continue;
//not too far from the tip of read
else if(Math.min(-curAlnRec.readAlignmentEnd()+curAlnRec.readLength, curAlnRec.readAlignmentStart()) > (double)flk.length()*tipsPercentage/100.0){
continue;
}
HashMap<Contig, AlignmentRecord> data;
if(!map.containsKey(curReadName) || map.get(curReadName).isEmpty()){
data=new HashMap<>();
data.put(flk, curAlnRec);
map.put(curReadName, data);
// }else if(map.get(curReadName).size()==1 ){
// AlignmentRecord prev = map.get(curReadName).get(0);
// //not overlap each other
// if((prev.readStart-curAlnRec.readStart)*(prev.readEnd-curAlnRec.readStart) <= 0
// || (prev.readStart-curAlnRec.readEnd)*(prev.readEnd-curAlnRec.readEnd) <= 0 )
// continue;
// //the in-between must longer than insertLength
// if(prev.readAlignmentEnd()-prev.readAlignmentStart() + curAlnRec.readAlignmentEnd()-curAlnRec.readAlignmentStart() > curAlnRec.readLength -insertLength)
// continue;
//
// map.get(curReadName).add(curAlnRec);
}else{
data=map.get(curReadName);
if(!data.containsKey(flk)){
data.put(flk, curAlnRec);
}else if(data.get(flk).score < curAlnRec.score){
data.replace(flk, curAlnRec);
}
}
}// while
iter.close();
/**********************************************************************/
int totReadNum = map.keySet().size();
ArrayList<String> flank0 = new ArrayList<String>(),
flank1_0 = new ArrayList<String>(),
flank1_1 = new ArrayList<String>(),
flank2 = new ArrayList<String>();
map.keySet().stream().forEach(r->{
if(map.get(r).isEmpty())
flank0.add(r);
else if(map.get(r).keySet().size()==1){
if(map.get(r).containsKey(flankSeqs.get(0)))
flank1_0.add(r);
else if(map.get(r).containsKey(flankSeqs.get(1)))
flank1_1.add(r);
}
else if(map.get(r).size()==2)
flank2.add(r);
});
System.out.println( "Total number of reads: " + totReadNum);
System.out.println("Number of reads with 0 flank sequences: " + flank0.size());
flank0.stream().forEach(r->System.out.println(r));
System.out.println("++++++++++++++++++++++++++++++++++++++++++++++++++++++++++");
System.out.printf("Number of reads with only 1 flank sequence %s: %d \n" , flankSeqs.get(0).getName(), flank1_0.size());
flank1_0.stream().forEach(r->System.out.println(r));
System.out.println("++++++++++++++++++++++++++++++++++++++++++++++++++++++++++");
System.out.printf("Number of reads with only 1 flank sequence %s: %d \n" , flankSeqs.get(1).getName(), flank1_1.size());
flank1_1.stream().forEach(r->System.out.println(r));
System.out.println("++++++++++++++++++++++++++++++++++++++++++++++++++++++++++");
System.out.println("Number of reads with 2 flank sequences: " + flank2.size());
flank2.stream().forEach(r->System.out.println(r));
System.out.println("++++++++++++++++++++++++++++++++++++++++++++++++++++++++++");
}
}
| add parameter --cov to flankDetect | src/dev/java/japsadev/tools/FlankSeqsDetectorCmd.java | add parameter --cov to flankDetect | <ide><path>rc/dev/java/japsadev/tools/FlankSeqsDetectorCmd.java
<ide> 		addDouble("qual", 1, "Minimum quality");
<ide> addInt("insert", 10, "Minimum length of insert sequence in-between 2 flanking sequences");
<ide> addInt("tips", 20, "Maximum percentage of the overhangs compared to the corresponding flanking sequence");
<add> 		addDouble("cover", 80, "Minimum percentage of flank sequence coverage for a valid alignment");
<add>
<ide> addStdHelp();
<ide> }
<ide> /**
<ide> /**********************************************************************/
<ide> String flankSeqsFile= cmdLine.getStringVal("flankFile");
<ide> String bamFile = cmdLine.getStringVal("bamFile");
<del> double qual = cmdLine.getDoubleVal("qual");
<add> double qual = cmdLine.getDoubleVal("qual"),
<add> flkCov = cmdLine.getDoubleVal("cover");
<ide> int insertLength = cmdLine.getIntVal("insert"),
<ide> tipsPercentage = cmdLine.getIntVal("tips");
<ide>
<ide> }
<ide> Contig flk = flankSeqs.get(rec.getReferenceIndex());
<ide> curAlnRec=new AlignmentRecord(rec, flk);
<del> if(curAlnRec.refEnd-curAlnRec.refStart < .5*flk.length())
<add> if(curAlnRec.refEnd-curAlnRec.refStart < (double)flkCov*flk.length()/100.0)
<ide> continue;
<ide> //not too far from the tip of read
<ide> else if(Math.min(-curAlnRec.readAlignmentEnd()+curAlnRec.readLength, curAlnRec.readAlignmentStart()) > (double)flk.length()*tipsPercentage/100.0){
<ide> data=new HashMap<>();
<ide> data.put(flk, curAlnRec);
<ide> map.put(curReadName, data);
<del>// }else if(map.get(curReadName).size()==1 ){
<del>// AlignmentRecord prev = map.get(curReadName).get(0);
<del>// //not overlap each other
<del>// if((prev.readStart-curAlnRec.readStart)*(prev.readEnd-curAlnRec.readStart) <= 0
<del>// || (prev.readStart-curAlnRec.readEnd)*(prev.readEnd-curAlnRec.readEnd) <= 0 )
<del>// continue;
<del>// //the in-between must longer than insertLength
<del>// if(prev.readAlignmentEnd()-prev.readAlignmentStart() + curAlnRec.readAlignmentEnd()-curAlnRec.readAlignmentStart() > curAlnRec.readLength -insertLength)
<del>// continue;
<del>//
<del>// map.get(curReadName).add(curAlnRec);
<ide>
<ide> }else{
<ide> data=map.get(curReadName);
<ide> flank1_1.add(r);
<ide>
<ide> }
<del> else if(map.get(r).size()==2)
<add> else if(map.get(r).keySet().size()==2){
<add> AlignmentRecord aln0 = map.get(r).get(flankSeqs.get(0)),
<add> aln1 = map.get(r).get(flankSeqs.get(1));
<add> if((aln0.readStart-aln1.readStart)*(aln0.readEnd-aln1.readStart) <= 0
<add> || (aln0.readStart-aln1.readEnd)*(aln0.readEnd-aln1.readEnd) <= 0 )
<add> return;
<add> 				//the in-between must be longer than insertLength
<add> if(aln0.readAlignmentEnd()-aln0.readAlignmentStart() + aln1.readAlignmentEnd()-aln1.readAlignmentStart() > aln1.readLength -insertLength)
<add> return;
<ide> flank2.add(r);
<add> }
<ide> });
<ide> System.out.println( "Total number of reads: " + totReadNum);
<ide> System.out.println("Number of reads with 0 flank sequences: " + flank0.size()); |
|
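The flank-detection change above accepts a read only when its two flank alignments do not overlap and leave room for the insert. A JavaScript rendering of that acceptance test (the Java original lives in the record above; field names mirror it, values below are hypothetical, and the length check is simplified to use readStart/readEnd throughout):

// Sketch of the two-flank acceptance test; not the Java implementation itself.
function acceptTwoFlanks(aln0, aln1, readLength, insertLength) {
  // Endpoint-containment overlap test, as in the original:
  // (a - c) * (b - c) <= 0 exactly when c lies within [a, b].
  var overlaps =
    (aln0.readStart - aln1.readStart) * (aln0.readEnd - aln1.readStart) <= 0 ||
    (aln0.readStart - aln1.readEnd) * (aln0.readEnd - aln1.readEnd) <= 0;
  if (overlaps) return false;
  // The in-between insert must be at least insertLength long.
  var aligned = (aln0.readEnd - aln0.readStart) + (aln1.readEnd - aln1.readStart);
  return aligned <= readLength - insertLength;
}

console.log(acceptTwoFlanks({readStart: 0, readEnd: 100}, {readStart: 150, readEnd: 250}, 300, 10)); // true
console.log(acceptTwoFlanks({readStart: 0, readEnd: 100}, {readStart: 90, readEnd: 190}, 300, 10));  // false (overlap)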
Java | bsd-3-clause | d8b138735b6d1276cdd27b11712deb1e80e03b48 | 0 | lockss/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon | /*
Copyright (c) 2000-2018 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.
*/
package org.lockss.plugin.pubfactory;
import org.htmlparser.Node;
import org.htmlparser.NodeFilter;
import org.htmlparser.Tag;
import org.htmlparser.filters.OrFilter;
import org.htmlparser.filters.TagNameFilter;
import org.htmlparser.tags.Bullet;
import org.htmlparser.tags.Div;
import org.htmlparser.util.NodeList;
import org.htmlparser.visitors.NodeVisitor;
import org.lockss.filter.FilterUtil;
import org.lockss.filter.WhiteSpaceFilter;
import org.lockss.filter.html.*;
import org.lockss.plugin.ArchivalUnit;
import org.lockss.plugin.FilterFactory;
import org.lockss.util.Logger;
import org.lockss.util.ReaderInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.util.Vector;
// Keeps contents only (includeNodes), then hashes out unwanted nodes
// within the content (excludeNodes).
public class PubFactoryHtmlHashFilterFactory implements FilterFactory {
private static final Logger log =
Logger.getLogger(PubFactoryHtmlHashFilterFactory.class);
@Override
public InputStream createFilteredInputStream(ArchivalUnit au,
InputStream in,
String encoding) {
NodeFilter[] excludeNodes = new NodeFilter[] {
HtmlNodeFilters.tag("head"),
new TagNameFilter("script"),
new TagNameFilter("noscript"),
// filter out comments
HtmlNodeFilters.comment(),
// citation overlay for download of ris - this has download date
// and the ris citation has a one-time key
// so just keep the referring article as a way of hashing
HtmlNodeFilters.allExceptSubtree(
HtmlNodeFilters.tagWithAttribute("div", "id", "previewWrapper"),
HtmlNodeFilters.tagWithAttributeRegex("a", "href", "/view/journals/")),
//html structure change in Oct/2020
//https://www.berghahnjournals.com/view/journals/boyhood-studies/12/1/bhs120101.xml
HtmlNodeFilters.tagWithAttributeRegex("div", "id", "headerWrap"),
HtmlNodeFilters.tagWithAttributeRegex("div", "id", "footerWrap"),
HtmlNodeFilters.tagWithAttributeRegex("div", "class", "fixed-controls"),
/*
// Metrics on AMetSoc https://journals.ametsoc.org/view/journals/wcas/12/2/wcas-d-19-0115.1.xml
// class name is big e.g. "component component-content-item component-container container-metrics container-wrapper-43132"
HtmlNodeFilters.tagWithAttributeRegex("div", "class", "component-content-metrics"),
// same with related content
HtmlNodeFilters.tagWithAttributeRegex("div", "class", "component-related-content"),
*/
// Get rid of entire sidebar, as it has lots of dynamic ids etc
// "component component-content-item component-container container-sideBar container-wrapper-43148 container-accordion"
HtmlNodeFilters.tagWithAttributeRegex("div", "class", "container-sideBar"),
// get rid of volume dropdown, it similarly has generated ids
HtmlNodeFilters.tagWithAttributeRegex("div", "class", "component-volume-issue-selector"),
/* sometimes there is a little div near the bottom of the page that contains the IP address.
* <div id="debug" style="display: none"> <ul>
* <li id="xForwarded">[171.66.236.212]</li>
* <li id="modifiedRemoteAddr">171.66.236.212</li>
* </ul> </div>
*/
HtmlNodeFilters.tagWithAttribute("div", "id", "debug"),
// there are a number of input forms, comment boxes, etc to filter out
HtmlNodeFilters.tagWithAttributeRegex("form", "class", "annotationsForm"),
HtmlNodeFilters.tagWithAttributeRegex("div", "class", "searchModule"),
// the access seems to change? maybe we caught them in a migration, but just to be safe, exclude the access icon
HtmlNodeFilters.tagWithAttributeRegex("div", "class", "accessIcon"),
// cover image alt text changes? weird
HtmlNodeFilters.tagWithAttributeRegex("div", "class", "component-cover-image"),
        // if an article is added to some 'collection' after being published, it gets amended with this tag
HtmlNodeFilters.tagWithAttributeRegex("dl", "class", "tax-collections "),
// there is a p tag that contains some copyright text.
// this tag occurs sometimes below the abstract.
// only way to filter it out is a regex on the content, as there are no attributes associated with the p tag.
        // trying to be as conservative as possible so as not to remove the whole abstract if the
        // nodes get embedded or moved around for some reason.
HtmlNodeFilters.tagWithTextRegex("p", "^.{0,20}American Meteorological Society.{0,250}AMS Copyright Policy.{0,250}$"),
new NodeFilter() {
@Override
public boolean accept(Node node) {
// ifp:body is not a tag class, we can use this to our advantage and delete the whole node by matching a
// regex on the node and removing the whole thing without fear of deleting "child" nodes.
if (node instanceof Tag && node.getText().matches("^/?ifp:body.{0,150}$")) {
return true;
}
return false;
}
}
};
return getFilteredInputStream(au, in, encoding,
excludeNodes);
}
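  /*
   * Minimal usage sketch (added commentary, not part of the original plugin):
   * how a caller might drive the factory above. The sample markup and the
   * ArchivalUnit passed through are assumptions for illustration only.
   */
  InputStream hashExample(ArchivalUnit au, String html) {
    InputStream raw = new java.io.ByteArrayInputStream(
        html.getBytes(java.nio.charset.StandardCharsets.UTF_8));
    // everything matched by excludeNodes (scripts, the debug div, generated
    // ids, ...) is stripped before the stream is hashed
    return createFilteredInputStream(au, raw, "UTF-8");
  }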
HtmlTransform xform = new HtmlTransform() {
    @Override // this transform removes dynamically generated attribute values from a number of tags and attributes.
public NodeList transform(NodeList nodeList) throws IOException {
try {
nodeList.visitAllNodesWith(new NodeVisitor() {
@Override
public void visitTag(Tag tag) {
String tagName = tag.getTagName().toLowerCase();
          /* Many of the ul and li tags contain dynamic attributes; aggressively remove these
* <ul class="ajax-zone m-0 t-zone" id="zone115228561_1">
* <ul data-menu-list="list-id-567363a7-9393-49e7-..." ...>
* <li ... data-menu-item="list-id-fe284..." ...>
*/
if (tagName.equals("ul")) {
if (tag.getAttribute("id") != null){
tag.removeAttribute("id");
}
if (tag.getAttribute("data-menu-list") != null) {
tag.removeAttribute("data-menu-list");
}
} else if (tagName.equals("li")) {
if (tag.getAttribute("id") != null) {
tag.removeAttribute("id");
}
if (tag.getAttribute("data-menu-item") != null) {
tag.removeAttribute("data-menu-item");
}
}
          /* Remove the generated ids from all the h# tags
* <h2 class="abstractTitle text-title my-1" id="d3038e2">Abstract</h2>
* <h3 id="d4951423e445">a. Satellite data</h3>
* <h4 id="d4951423e1002">On what scale does lightning enhancement occur?</h4>
*/
else if (tagName.matches("h\\d") && (tag.getAttribute("id") != null)) {
tag.removeAttribute("id");
}
/* remove these data-popover[-anchor] attributes that are dynamically generated from div and button tags
* <div data-popover-fullscreen="false" data-popover-placement="" data-popover-breakpoints="" data-popover="607a919f-a0fd-41c2-9100-deaaff9a0862" class="position-absolute display-none">
* <button data-popover-anchor="0979a884-7df8-4d05-a54...
*/
else if ("div".equals(tagName) || "button".equals(tagName)) {
if (tag.getAttribute("data-popover-anchor") != null) {
tag.removeAttribute("data-popover-anchor");
}
if (tag.getAttribute("data-popover") != null) {
tag.removeAttribute("data-popover");
}
// the container-wrapper-NUMBERS is dynamic
// <div class="component component-content-item component-container container-body container-tabbed container-wrapper-43131">
if (tag.getAttribute("class") != null && tag.getAttribute("class").matches(".*container-wrapper-.*")) {
tag.removeAttribute("class");
}
// <div id="container-43131-item-43166" class="container-item">
if (tag.getAttribute("id") != null && tag.getAttribute("id").matches(".*container-.*")) {
tag.removeAttribute("id");
}
} else if ("nav".equals(tagName) && (tag.getAttribute("id") != null) && tag.getAttribute("id").matches(".*container-.*") ) {
//<nav data-container-tab-address="tab_body" id="container-nav-43131" class="container-tabs">
tag.removeAttribute("id");
} else if ("a".equals(tagName)) {
// <a data-tab-id="abstract-display" title="" href="#container-43131-item-43130" tabIndex="0" role="button" type="button" class=" c-Button c-Button--medium ">
            // for hashing, let's not worry about all the possible patterns of the internal dynamic links; just ignore all the internal hrefs
if ((tag.getAttribute("href") != null) && (tag.getAttribute("href").startsWith("#"))) {
tag.removeAttribute("href");
}
// <a data-tab-id="previewPdf-43621" title="" tabIndex="0" role="button" type="button" class=" c-Button c-Button--medium ">
if (tag.getAttribute("data-tab-id") != null) {
tag.removeAttribute("data-tab-id");
}
}
}
});
}
catch (Exception exc) {
log.debug2("Internal error (visitor)", exc); // Ignore this tag and move on
}
return nodeList;
}
};
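  /*
   * Added commentary, hypothetical markup: the transform above normalizes
   *   <ul id="zone115228561_1" class="ajax-zone m-0 t-zone">   to
   *   <ul class="ajax-zone m-0 t-zone">
   * so two crawls that differ only in generated ids hash identically.
   */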
  // Takes exclude nodes as input (the include-node variant is commented out).
  // Removes white spaces.
public InputStream getFilteredInputStream(ArchivalUnit au, InputStream in,
// String encoding, NodeFilter[] includeNodes, NodeFilter[] excludeNodes) {
String encoding, NodeFilter[] excludeNodes) {
if (excludeNodes == null) {
throw new NullPointerException("excludeNodes array is null");
}
//if (includeNodes == null) {
// throw new NullPointerException("includeNodes array is null!");
//}
InputStream filtered;
filtered = new HtmlFilterInputStream(in, encoding,
new HtmlCompoundTransform(
// HtmlNodeFilterTransform.include(new OrFilter(includeNodes)),
HtmlNodeFilterTransform.exclude(new OrFilter(excludeNodes)), xform)
);
Reader reader = FilterUtil.getReader(filtered, encoding);
return new ReaderInputStream( new WhiteSpaceFilter(reader));
//return new ReaderInputStream(reader);
}
}
| plugins/src/org/lockss/plugin/pubfactory/PubFactoryHtmlHashFilterFactory.java | /*
Copyright (c) 2000-2018 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.
*/
package org.lockss.plugin.pubfactory;
import org.htmlparser.Node;
import org.htmlparser.NodeFilter;
import org.htmlparser.Tag;
import org.htmlparser.filters.OrFilter;
import org.htmlparser.filters.TagNameFilter;
import org.htmlparser.tags.Bullet;
import org.htmlparser.tags.Div;
import org.htmlparser.util.NodeList;
import org.htmlparser.visitors.NodeVisitor;
import org.lockss.filter.FilterUtil;
import org.lockss.filter.WhiteSpaceFilter;
import org.lockss.filter.html.*;
import org.lockss.plugin.ArchivalUnit;
import org.lockss.plugin.FilterFactory;
import org.lockss.util.Logger;
import org.lockss.util.ReaderInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.util.Vector;
// Hashes out unwanted nodes within the content (excludeNodes); the
// includeNodes pass is present but currently commented out.
public class PubFactoryHtmlHashFilterFactory implements FilterFactory {
private static final Logger log =
Logger.getLogger(PubFactoryHtmlHashFilterFactory.class);
@Override
public InputStream createFilteredInputStream(ArchivalUnit au,
InputStream in,
String encoding) {
NodeFilter[] excludeNodes = new NodeFilter[] {
HtmlNodeFilters.tag("head"),
new TagNameFilter("script"),
new TagNameFilter("noscript"),
// filter out comments
HtmlNodeFilters.comment(),
// citation overlay for download of ris - this has download date
// and the ris citation has a one-time key
// so just keep the referring article as a way of hashing
HtmlNodeFilters.allExceptSubtree(
HtmlNodeFilters.tagWithAttribute("div", "id", "previewWrapper"),
HtmlNodeFilters.tagWithAttributeRegex("a", "href", "/view/journals/")),
//html structure change in Oct/2020
//https://www.berghahnjournals.com/view/journals/boyhood-studies/12/1/bhs120101.xml
HtmlNodeFilters.tagWithAttributeRegex("div", "id", "headerWrap"),
HtmlNodeFilters.tagWithAttributeRegex("div", "id", "footerWrap"),
HtmlNodeFilters.tagWithAttributeRegex("div", "class", "fixed-controls"),
/*
// Metrics on AMetSoc https://journals.ametsoc.org/view/journals/wcas/12/2/wcas-d-19-0115.1.xml
// class name is big e.g. "component component-content-item component-container container-metrics container-wrapper-43132"
HtmlNodeFilters.tagWithAttributeRegex("div", "class", "component-content-metrics"),
// same with related content
HtmlNodeFilters.tagWithAttributeRegex("div", "class", "component-related-content"),
*/
// Get rid of entire sidebar, as it has lots of dynamic ids etc
// "component component-content-item component-container container-sideBar container-wrapper-43148 container-accordion"
HtmlNodeFilters.tagWithAttributeRegex("div", "class", "container-sideBar"),
// get rid of volume dropdown, it similarly has generated ids
HtmlNodeFilters.tagWithAttributeRegex("div", "class", "component-volume-issue-selector"),
/* sometimes there is a little div near the bottom of the page that contains the IP address.
* <div id="debug" style="display: none"> <ul>
* <li id="xForwarded">[171.66.236.212]</li>
* <li id="modifiedRemoteAddr">171.66.236.212</li>
* </ul> </div>
*/
HtmlNodeFilters.tagWithAttribute("div", "id", "debug"),
// there are a number of input forms, comment boxes, etc to filter out
HtmlNodeFilters.tagWithAttributeRegex("form", "class", "annotationsForm"),
HtmlNodeFilters.tagWithAttributeRegex("div", "class", "searchModule"),
// the access seems to change? maybe we caught them in a migration, but just to be safe, exclude the access icon
HtmlNodeFilters.tagWithAttributeRegex("div", "class", "accessIcon"),
// cover image alt text changes? weird
HtmlNodeFilters.tagWithAttributeRegex("div", "class", "component-cover-image"),
        // if an article is added to some 'collection' after being published, it gets amended with this tag
HtmlNodeFilters.tagWithAttributeRegex("dl", "class", "tax-collections "),
// there is a p tag that contains some copyright text.
// this tag occurs sometimes below the abstract.
// only way to filter it out is a regex on the content, as there are no attributes associated with the p tag.
        // trying to be as conservative as possible so as not to remove the whole abstract if the
        // nodes get embedded or moved around for some reason.
HtmlNodeFilters.tagWithTextRegex("p", "^.{0,20}American Meteorological Society.{0,250}AMS Copyright Policy.{0,250}$"),
new NodeFilter() {
@Override
public boolean accept(Node node) {
// ifp:body is not a HtmlTag class, we can use this to our advantage and delete the whole node by matching a
// regex on the node and removing the whole thing without fear of deleting "child" nodes.
if (node.getText().matches("^/?ifp:body.{0,150}$")) {
return true;
}
return false;
}
}
};
return getFilteredInputStream(au, in, encoding,
excludeNodes);
}
HtmlTransform xform = new HtmlTransform() {
    @Override // this transform removes dynamically generated attribute values from a number of tags and attributes.
public NodeList transform(NodeList nodeList) throws IOException {
try {
nodeList.visitAllNodesWith(new NodeVisitor() {
@Override
public void visitTag(Tag tag) {
String tagName = tag.getTagName().toLowerCase();
          /* Many of the ul and li tags contain dynamic attributes; aggressively remove these
* <ul class="ajax-zone m-0 t-zone" id="zone115228561_1">
* <ul data-menu-list="list-id-567363a7-9393-49e7-..." ...>
* <li ... data-menu-item="list-id-fe284..." ...>
*/
if (tagName.equals("ul")) {
if (tag.getAttribute("id") != null){
tag.removeAttribute("id");
}
if (tag.getAttribute("data-menu-list") != null) {
tag.removeAttribute("data-menu-list");
}
} else if (tagName.equals("li")) {
if (tag.getAttribute("id") != null) {
tag.removeAttribute("id");
}
if (tag.getAttribute("data-menu-item") != null) {
tag.removeAttribute("data-menu-item");
}
}
          /* Remove the generated ids from all the h# tags
* <h2 class="abstractTitle text-title my-1" id="d3038e2">Abstract</h2>
* <h3 id="d4951423e445">a. Satellite data</h3>
* <h4 id="d4951423e1002">On what scale does lightning enhancement occur?</h4>
*/
else if (tagName.matches("h\\d") && (tag.getAttribute("id") != null)) {
tag.removeAttribute("id");
}
/* remove these data-popover[-anchor] attributes that are dynamically generated from div and button tags
* <div data-popover-fullscreen="false" data-popover-placement="" data-popover-breakpoints="" data-popover="607a919f-a0fd-41c2-9100-deaaff9a0862" class="position-absolute display-none">
* <button data-popover-anchor="0979a884-7df8-4d05-a54...
*/
else if ("div".equals(tagName) || "button".equals(tagName)) {
if (tag.getAttribute("data-popover-anchor") != null) {
tag.removeAttribute("data-popover-anchor");
}
if (tag.getAttribute("data-popover") != null) {
tag.removeAttribute("data-popover");
}
// the container-wrapper-NUMBERS is dynamic
// <div class="component component-content-item component-container container-body container-tabbed container-wrapper-43131">
if (tag.getAttribute("class") != null && tag.getAttribute("class").matches(".*container-wrapper-.*")) {
tag.removeAttribute("class");
}
// <div id="container-43131-item-43166" class="container-item">
if (tag.getAttribute("id") != null && tag.getAttribute("id").matches(".*container-.*")) {
tag.removeAttribute("id");
}
} else if ("nav".equals(tagName) && (tag.getAttribute("id") != null) && tag.getAttribute("id").matches(".*container-.*") ) {
//<nav data-container-tab-address="tab_body" id="container-nav-43131" class="container-tabs">
tag.removeAttribute("id");
} else if ("a".equals(tagName)) {
// <a data-tab-id="abstract-display" title="" href="#container-43131-item-43130" tabIndex="0" role="button" type="button" class=" c-Button c-Button--medium ">
            // for hashing, let's not worry about all the possible patterns of the internal dynamic links; just ignore all the internal hrefs
if ((tag.getAttribute("href") != null) && (tag.getAttribute("href").startsWith("#"))) {
tag.removeAttribute("href");
}
// <a data-tab-id="previewPdf-43621" title="" tabIndex="0" role="button" type="button" class=" c-Button c-Button--medium ">
if (tag.getAttribute("data-tab-id") != null) {
tag.removeAttribute("data-tab-id");
}
}
}
});
}
catch (Exception exc) {
log.debug2("Internal error (visitor)", exc); // Ignore this tag and move on
}
return nodeList;
}
};
  // Takes exclude nodes as input (the include-node variant is commented out).
  // Removes white spaces.
public InputStream getFilteredInputStream(ArchivalUnit au, InputStream in,
// String encoding, NodeFilter[] includeNodes, NodeFilter[] excludeNodes) {
String encoding, NodeFilter[] excludeNodes) {
if (excludeNodes == null) {
throw new NullPointerException("excludeNodes array is null");
}
//if (includeNodes == null) {
// throw new NullPointerException("includeNodes array is null!");
//}
InputStream filtered;
filtered = new HtmlFilterInputStream(in, encoding,
new HtmlCompoundTransform(
// HtmlNodeFilterTransform.include(new OrFilter(includeNodes)),
HtmlNodeFilterTransform.exclude(new OrFilter(excludeNodes)), xform)
);
Reader reader = FilterUtil.getReader(filtered, encoding);
return new ReaderInputStream( new WhiteSpaceFilter(reader));
//return new ReaderInputStream(reader);
}
}
| minor modification to make previous commit more conservative of a filter
| plugins/src/org/lockss/plugin/pubfactory/PubFactoryHtmlHashFilterFactory.java | minor modification to make previous commit more conservative of a filter | <ide><path>lugins/src/org/lockss/plugin/pubfactory/PubFactoryHtmlHashFilterFactory.java
<ide> new NodeFilter() {
<ide> @Override
<ide> public boolean accept(Node node) {
<del> // ifp:body is not a HtmlTag class, we can use this to our advantage and delete the whole node by matching a
<add> // ifp:body is not a tag class, we can use this to our advantage and delete the whole node by matching a
<ide> // regex on the node and removing the whole thing without fear of deleting "child" nodes.
<del> if (node.getText().matches("^/?ifp:body.{0,150}$")) {
<add> if (node instanceof Tag && node.getText().matches("^/?ifp:body.{0,150}$")) {
<ide> return true;
<ide> }
<ide> return false; |
|
Java | apache-2.0 | 956689e54874d145f98fcd05603865c3a7f3ee25 | 0 | MichaelNedzelsky/intellij-community,tmpgit/intellij-community,fnouama/intellij-community,ahb0327/intellij-community,diorcety/intellij-community,amith01994/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,samthor/intellij-community,alphafoobar/intellij-community,izonder/intellij-community,retomerz/intellij-community,supersven/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,ryano144/intellij-community,muntasirsyed/intellij-community,pwoodworth/intellij-community,fengbaicanhe/intellij-community,amith01994/intellij-community,FHannes/intellij-community,clumsy/intellij-community,retomerz/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,muntasirsyed/intellij-community,retomerz/intellij-community,SerCeMan/intellij-community,FHannes/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,fnouama/intellij-community,akosyakov/intellij-community,blademainer/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,apixandru/intellij-community,Distrotech/intellij-community,salguarnieri/intellij-community,kdwink/intellij-community,xfournet/intellij-community,xfournet/intellij-community,semonte/intellij-community,ol-loginov/intellij-community,michaelgallacher/intellij-community,robovm/robovm-studio,Distrotech/intellij-community,supersven/intellij-community,robovm/robovm-studio,idea4bsd/idea4bsd,idea4bsd/idea4bsd,holmes/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,izonder/intellij-community,kdwink/intellij-community,apixandru/intellij-community,samthor/intellij-community,hurricup/intellij-community,tmpgit/intellij-community,allotria/intellij-community,Lekanich/intellij-community,fitermay/intellij-community,ThiagoGarciaAlves/intellij-community,clumsy/intellij-community,slisson/intellij-community,gnuhub/intellij-community,nicolargo/intellij-community,retomerz/intellij-community,MER-GROUP/intellij-community,akosyakov/intellij-community,adedayo/intellij-community,clumsy/intellij-community,TangHao1987/intellij-community,apixandru/intellij-community,robovm/robovm-studio,gnuhub/intellij-community,akosyakov/intellij-community,Lekanich/intellij-community,dslomov/intellij-community,blademainer/intellij-community,fnouama/intellij-community,vladmm/intellij-community,fengbaicanhe/intellij-community,fitermay/intellij-community,signed/intellij-community,tmpgit/intellij-community,orekyuu/intellij-community,holmes/intellij-community,consulo/consulo,jagguli/intellij-community,orekyuu/intellij-community,xfournet/intellij-community,pwoodworth/intellij-community,ftomassetti/intellij-community,ivan-fedorov/intellij-community,izonder/intellij-community,kool79/intellij-community,gnuhub/intellij-community,TangHao1987/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,signed/intellij-community,vvv1559/intellij-community,alphafoobar/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,TangHao1987/intellij-community,samthor/intellij-community,FHannes/intellij-community,allotria/intellij-community,fitermay/intellij-community,orekyuu/intellij-community,vladmm/intellij-community,allotria/intellij-community,Lekanich/intellij-community,michaelgallacher/intellij-community,tmpgit/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,ol-loginov/intellij-community,dslomov/intellij-com
munity,fitermay/intellij-community,fitermay/intellij-community,ahb0327/intellij-community,youdonghai/intellij-community,salguarnieri/intellij-community,MichaelNedzelsky/intellij-community,kool79/intellij-community,allotria/intellij-community,signed/intellij-community,SerCeMan/intellij-community,TangHao1987/intellij-community,caot/intellij-community,allotria/intellij-community,xfournet/intellij-community,semonte/intellij-community,ol-loginov/intellij-community,kdwink/intellij-community,ibinti/intellij-community,wreckJ/intellij-community,diorcety/intellij-community,salguarnieri/intellij-community,ibinti/intellij-community,wreckJ/intellij-community,semonte/intellij-community,apixandru/intellij-community,MichaelNedzelsky/intellij-community,kool79/intellij-community,pwoodworth/intellij-community,akosyakov/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,vladmm/intellij-community,ibinti/intellij-community,salguarnieri/intellij-community,blademainer/intellij-community,orekyuu/intellij-community,apixandru/intellij-community,da1z/intellij-community,semonte/intellij-community,hurricup/intellij-community,suncycheng/intellij-community,blademainer/intellij-community,holmes/intellij-community,ThiagoGarciaAlves/intellij-community,TangHao1987/intellij-community,fitermay/intellij-community,ol-loginov/intellij-community,clumsy/intellij-community,blademainer/intellij-community,diorcety/intellij-community,jagguli/intellij-community,alphafoobar/intellij-community,Distrotech/intellij-community,fengbaicanhe/intellij-community,FHannes/intellij-community,samthor/intellij-community,caot/intellij-community,ahb0327/intellij-community,signed/intellij-community,fitermay/intellij-community,ftomassetti/intellij-community,gnuhub/intellij-community,robovm/robovm-studio,orekyuu/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,kdwink/intellij-community,Distrotech/intellij-community,asedunov/intellij-community,allotria/intellij-community,wreckJ/intellij-community,MichaelNedzelsky/intellij-community,supersven/intellij-community,adedayo/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,caot/intellij-community,kool79/intellij-community,ryano144/intellij-community,blademainer/intellij-community,signed/intellij-community,izonder/intellij-community,vvv1559/intellij-community,Lekanich/intellij-community,suncycheng/intellij-community,izonder/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,orekyuu/intellij-community,tmpgit/intellij-community,ahb0327/intellij-community,adedayo/intellij-community,vvv1559/intellij-community,MER-GROUP/intellij-community,salguarnieri/intellij-community,semonte/intellij-community,holmes/intellij-community,samthor/intellij-community,fengbaicanhe/intellij-community,alphafoobar/intellij-community,adedayo/intellij-community,da1z/intellij-community,ol-loginov/intellij-community,MER-GROUP/intellij-community,petteyg/intellij-community,fnouama/intellij-community,vvv1559/intellij-community,pwoodworth/intellij-community,kdwink/intellij-community,suncycheng/intellij-community,amith01994/intellij-community,adedayo/intellij-community,retomerz/intellij-community,mglukhikh/intellij-community,adedayo/intellij-community,blademainer/intellij-community,fengbaicanhe/intellij-community,akosyakov/intellij-community,fnouama/intellij-community,allotria/intellij-community,hurricup/intellij-community,suncycheng/intellij-community,FHannes/intellij-community,SerCeMan/in
tellij-community,MichaelNedzelsky/intellij-community,wreckJ/intellij-community,kool79/intellij-community,pwoodworth/intellij-community,jagguli/intellij-community,holmes/intellij-community,suncycheng/intellij-community,Distrotech/intellij-community,jagguli/intellij-community,ThiagoGarciaAlves/intellij-community,amith01994/intellij-community,wreckJ/intellij-community,ftomassetti/intellij-community,wreckJ/intellij-community,lucafavatella/intellij-community,semonte/intellij-community,MER-GROUP/intellij-community,kool79/intellij-community,petteyg/intellij-community,TangHao1987/intellij-community,tmpgit/intellij-community,da1z/intellij-community,jagguli/intellij-community,ivan-fedorov/intellij-community,caot/intellij-community,retomerz/intellij-community,alphafoobar/intellij-community,clumsy/intellij-community,ryano144/intellij-community,MER-GROUP/intellij-community,amith01994/intellij-community,asedunov/intellij-community,SerCeMan/intellij-community,salguarnieri/intellij-community,retomerz/intellij-community,semonte/intellij-community,allotria/intellij-community,ahb0327/intellij-community,mglukhikh/intellij-community,gnuhub/intellij-community,wreckJ/intellij-community,diorcety/intellij-community,slisson/intellij-community,consulo/consulo,ivan-fedorov/intellij-community,ernestp/consulo,ahb0327/intellij-community,clumsy/intellij-community,ryano144/intellij-community,lucafavatella/intellij-community,pwoodworth/intellij-community,fnouama/intellij-community,vladmm/intellij-community,supersven/intellij-community,nicolargo/intellij-community,slisson/intellij-community,kdwink/intellij-community,hurricup/intellij-community,lucafavatella/intellij-community,blademainer/intellij-community,suncycheng/intellij-community,Lekanich/intellij-community,hurricup/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,vladmm/intellij-community,MER-GROUP/intellij-community,diorcety/intellij-community,holmes/intellij-community,alphafoobar/intellij-community,robovm/robovm-studio,SerCeMan/intellij-community,amith01994/intellij-community,youdonghai/intellij-community,slisson/intellij-community,mglukhikh/intellij-community,blademainer/intellij-community,youdonghai/intellij-community,robovm/robovm-studio,TangHao1987/intellij-community,suncycheng/intellij-community,hurricup/intellij-community,clumsy/intellij-community,wreckJ/intellij-community,ryano144/intellij-community,kool79/intellij-community,holmes/intellij-community,xfournet/intellij-community,ibinti/intellij-community,da1z/intellij-community,xfournet/intellij-community,diorcety/intellij-community,caot/intellij-community,dslomov/intellij-community,ftomassetti/intellij-community,hurricup/intellij-community,lucafavatella/intellij-community,izonder/intellij-community,nicolargo/intellij-community,blademainer/intellij-community,ryano144/intellij-community,supersven/intellij-community,ernestp/consulo,muntasirsyed/intellij-community,fitermay/intellij-community,jagguli/intellij-community,akosyakov/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,salguarnieri/intellij-community,xfournet/intellij-community,lucafavatella/intellij-community,amith01994/intellij-community,muntasirsyed/intellij-community,petteyg/intellij-community,ol-loginov/intellij-community,caot/intellij-community,FHannes/intellij-community,slisson/intellij-community,fitermay/intellij-community,akosyakov/intellij-community,retomerz/intellij-community,fengbaicanhe/intellij-community,ivan-fedorov/intellij-community,vvv1559/intellij-community,orekyuu/intelli
j-community,tmpgit/intellij-community,FHannes/intellij-community,wreckJ/intellij-community,fnouama/intellij-community,FHannes/intellij-community,holmes/intellij-community,signed/intellij-community,tmpgit/intellij-community,Lekanich/intellij-community,xfournet/intellij-community,ol-loginov/intellij-community,dslomov/intellij-community,idea4bsd/idea4bsd,kdwink/intellij-community,da1z/intellij-community,izonder/intellij-community,lucafavatella/intellij-community,pwoodworth/intellij-community,petteyg/intellij-community,vladmm/intellij-community,ernestp/consulo,amith01994/intellij-community,fitermay/intellij-community,MichaelNedzelsky/intellij-community,ivan-fedorov/intellij-community,Lekanich/intellij-community,idea4bsd/idea4bsd,akosyakov/intellij-community,SerCeMan/intellij-community,kdwink/intellij-community,izonder/intellij-community,ftomassetti/intellij-community,ivan-fedorov/intellij-community,slisson/intellij-community,vvv1559/intellij-community,Lekanich/intellij-community,SerCeMan/intellij-community,SerCeMan/intellij-community,salguarnieri/intellij-community,signed/intellij-community,muntasirsyed/intellij-community,samthor/intellij-community,izonder/intellij-community,SerCeMan/intellij-community,lucafavatella/intellij-community,youdonghai/intellij-community,pwoodworth/intellij-community,jagguli/intellij-community,retomerz/intellij-community,fengbaicanhe/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,dslomov/intellij-community,blademainer/intellij-community,MichaelNedzelsky/intellij-community,ThiagoGarciaAlves/intellij-community,wreckJ/intellij-community,orekyuu/intellij-community,MichaelNedzelsky/intellij-community,dslomov/intellij-community,da1z/intellij-community,muntasirsyed/intellij-community,tmpgit/intellij-community,diorcety/intellij-community,signed/intellij-community,amith01994/intellij-community,signed/intellij-community,ryano144/intellij-community,asedunov/intellij-community,signed/intellij-community,ivan-fedorov/intellij-community,apixandru/intellij-community,allotria/intellij-community,apixandru/intellij-community,SerCeMan/intellij-community,gnuhub/intellij-community,mglukhikh/intellij-community,ol-loginov/intellij-community,adedayo/intellij-community,amith01994/intellij-community,petteyg/intellij-community,ahb0327/intellij-community,holmes/intellij-community,SerCeMan/intellij-community,alphafoobar/intellij-community,hurricup/intellij-community,apixandru/intellij-community,MichaelNedzelsky/intellij-community,FHannes/intellij-community,akosyakov/intellij-community,clumsy/intellij-community,alphafoobar/intellij-community,michaelgallacher/intellij-community,supersven/intellij-community,samthor/intellij-community,gnuhub/intellij-community,orekyuu/intellij-community,clumsy/intellij-community,da1z/intellij-community,diorcety/intellij-community,dslomov/intellij-community,slisson/intellij-community,nicolargo/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,muntasirsyed/intellij-community,samthor/intellij-community,ahb0327/intellij-community,SerCeMan/intellij-community,fitermay/intellij-community,gnuhub/intellij-community,nicolargo/intellij-community,ibinti/intellij-community,MER-GROUP/intellij-community,idea4bsd/idea4bsd,lucafavatella/intellij-community,fengbaicanhe/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,petteyg/intellij-community,youdonghai/intellij-community,petteyg/intellij-community,FHannes/intellij-community,xfournet/intellij-community,robovm/robovm-
studio,clumsy/intellij-community,mglukhikh/intellij-community,diorcety/intellij-community,retomerz/intellij-community,semonte/intellij-community,muntasirsyed/intellij-community,ftomassetti/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,da1z/intellij-community,consulo/consulo,ryano144/intellij-community,akosyakov/intellij-community,MER-GROUP/intellij-community,izonder/intellij-community,nicolargo/intellij-community,mglukhikh/intellij-community,holmes/intellij-community,samthor/intellij-community,wreckJ/intellij-community,Distrotech/intellij-community,gnuhub/intellij-community,ahb0327/intellij-community,clumsy/intellij-community,alphafoobar/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,fnouama/intellij-community,adedayo/intellij-community,vladmm/intellij-community,michaelgallacher/intellij-community,ol-loginov/intellij-community,Distrotech/intellij-community,kool79/intellij-community,consulo/consulo,signed/intellij-community,samthor/intellij-community,orekyuu/intellij-community,diorcety/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,orekyuu/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,fnouama/intellij-community,Lekanich/intellij-community,vladmm/intellij-community,lucafavatella/intellij-community,MichaelNedzelsky/intellij-community,vladmm/intellij-community,supersven/intellij-community,lucafavatella/intellij-community,ivan-fedorov/intellij-community,TangHao1987/intellij-community,samthor/intellij-community,asedunov/intellij-community,ibinti/intellij-community,caot/intellij-community,ftomassetti/intellij-community,tmpgit/intellij-community,kool79/intellij-community,nicolargo/intellij-community,jagguli/intellij-community,da1z/intellij-community,lucafavatella/intellij-community,izonder/intellij-community,semonte/intellij-community,ryano144/intellij-community,vladmm/intellij-community,supersven/intellij-community,ol-loginov/intellij-community,kdwink/intellij-community,ernestp/consulo,fitermay/intellij-community,asedunov/intellij-community,fitermay/intellij-community,MER-GROUP/intellij-community,ivan-fedorov/intellij-community,allotria/intellij-community,caot/intellij-community,wreckJ/intellij-community,suncycheng/intellij-community,salguarnieri/intellij-community,mglukhikh/intellij-community,MER-GROUP/intellij-community,adedayo/intellij-community,Distrotech/intellij-community,ftomassetti/intellij-community,semonte/intellij-community,alphafoobar/intellij-community,hurricup/intellij-community,ftomassetti/intellij-community,youdonghai/intellij-community,ivan-fedorov/intellij-community,slisson/intellij-community,blademainer/intellij-community,ryano144/intellij-community,jagguli/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,Distrotech/intellij-community,ryano144/intellij-community,FHannes/intellij-community,kool79/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,idea4bsd/idea4bsd,pwoodworth/intellij-community,robovm/robovm-studio,consulo/consulo,slisson/intellij-community,MER-GROUP/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,petteyg/intellij-community,fengbaicanhe/intellij-community,ibinti/intellij-community,caot/intellij-community,diorcety/intellij-community,adedayo/intellij-community,petteyg/intellij-community,vvv1559/intellij-community,allotria/intellij-community,vladmm/intellij-community,slisson/intellij-community,asedunov/intellij-community,supersven/intellij-community,fnouama/intellij-community,Lekanic
h/intellij-community,apixandru/intellij-community,apixandru/intellij-community,robovm/robovm-studio,kool79/intellij-community,robovm/robovm-studio,supersven/intellij-community,petteyg/intellij-community,retomerz/intellij-community,jagguli/intellij-community,nicolargo/intellij-community,Lekanich/intellij-community,caot/intellij-community,salguarnieri/intellij-community,ernestp/consulo,dslomov/intellij-community,Lekanich/intellij-community,allotria/intellij-community,salguarnieri/intellij-community,tmpgit/intellij-community,muntasirsyed/intellij-community,retomerz/intellij-community,semonte/intellij-community,vvv1559/intellij-community,ivan-fedorov/intellij-community,idea4bsd/idea4bsd,akosyakov/intellij-community,slisson/intellij-community,nicolargo/intellij-community,dslomov/intellij-community,mglukhikh/intellij-community,ol-loginov/intellij-community,lucafavatella/intellij-community,tmpgit/intellij-community,MER-GROUP/intellij-community,lucafavatella/intellij-community,caot/intellij-community,ibinti/intellij-community,kdwink/intellij-community,dslomov/intellij-community,kdwink/intellij-community,gnuhub/intellij-community,fnouama/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,kool79/intellij-community,supersven/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,petteyg/intellij-community,FHannes/intellij-community,holmes/intellij-community,slisson/intellij-community,ahb0327/intellij-community,muntasirsyed/intellij-community,adedayo/intellij-community,da1z/intellij-community,akosyakov/intellij-community,jagguli/intellij-community,muntasirsyed/intellij-community,da1z/intellij-community,FHannes/intellij-community,signed/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,suncycheng/intellij-community,robovm/robovm-studio,hurricup/intellij-community,nicolargo/intellij-community,ivan-fedorov/intellij-community,pwoodworth/intellij-community,vvv1559/intellij-community,michaelgallacher/intellij-community,TangHao1987/intellij-community,apixandru/intellij-community,fnouama/intellij-community,dslomov/intellij-community,ahb0327/intellij-community,supersven/intellij-community,asedunov/intellij-community,alphafoobar/intellij-community,orekyuu/intellij-community,TangHao1987/intellij-community,suncycheng/intellij-community,retomerz/intellij-community,samthor/intellij-community,ol-loginov/intellij-community,Distrotech/intellij-community,jagguli/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,petteyg/intellij-community,MichaelNedzelsky/intellij-community,ernestp/consulo,signed/intellij-community,ibinti/intellij-community,ftomassetti/intellij-community,ftomassetti/intellij-community,robovm/robovm-studio,idea4bsd/idea4bsd,ahb0327/intellij-community,holmes/intellij-community,amith01994/intellij-community,asedunov/intellij-community,da1z/intellij-community,pwoodworth/intellij-community,ftomassetti/intellij-community,adedayo/intellij-community,caot/intellij-community,gnuhub/intellij-community,fengbaicanhe/intellij-community,izonder/intellij-community,consulo/consulo,asedunov/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,vladmm/intellij-community,fengbaicanhe/intellij-community,pwoodworth/intellij-community,amith01994/intellij-community,diorcety/intellij-community,salguarnieri/intellij-community,ryano144/intellij-community,fengbaicanhe/intellij-community,clumsy/intellij-community,xfournet/intellij-community,ibinti/intellij-community,dslomov/intellij-community,T
hiagoGarciaAlves/intellij-community,muntasirsyed/intellij-community,Distrotech/intellij-community,MichaelNedzelsky/intellij-community | /*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util.io;
import com.intellij.openapi.Forceable;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.util.SystemProperties;
import com.intellij.util.containers.hash.LinkedHashMap;
import jsr166e.SequenceLock;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import sun.misc.VM;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
/**
* @author max
*/
public class PagedFileStorage implements Forceable {
protected static final Logger LOG = Logger.getInstance("#com.intellij.util.io.PagedFileStorage");
public static final int MB = 1024 * 1024;
private final static int LOWER_LIMIT;
private final static int UPPER_LIMIT;
public final static int BUFFER_SIZE;
private static final int UNKNOWN_PAGE = -1;
static {
final int lower = 100;
final int upper = SystemInfo.is64Bit && !PersistentEnumeratorDelegate.useBtree() ? 500 : 200;
BUFFER_SIZE = Math.max(1, SystemProperties.getIntProperty("idea.paged.storage.page.size", 10)) * MB;
if (ByteBufferWrapper.NO_MMAP) {
final long max = VM.maxDirectMemory() - 2 * BUFFER_SIZE;
LOWER_LIMIT = (int)Math.min(lower * MB, max);
UPPER_LIMIT = (int)Math.min(Math.max(LOWER_LIMIT, SystemProperties.getIntProperty("idea.max.paged.storage.cache", upper) * MB), max);
}
else {
LOWER_LIMIT = lower * MB;
UPPER_LIMIT = Math.max(LOWER_LIMIT, SystemProperties.getIntProperty("idea.max.paged.storage.cache", upper) * MB);
}
LOG.info("lower=" + (LOWER_LIMIT / MB) +
"; upper=" + (UPPER_LIMIT / MB) +
"; buffer=" + (BUFFER_SIZE / MB) +
"; mmap=" + (!ByteBufferWrapper.NO_MMAP));
}
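  /*
   * Added commentary: with default system properties this static block yields
   * BUFFER_SIZE = 10MB and LOWER_LIMIT = 100MB; UPPER_LIMIT defaults to 500MB
   * on a 64-bit VM without the btree enumerator, otherwise 200MB.
   */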
private final StorageLockContext myStorageLockContext;
private int myLastPage = UNKNOWN_PAGE;
private int myLastPage2 = UNKNOWN_PAGE;
private int myLastPage3 = UNKNOWN_PAGE;
private ByteBufferWrapper myLastBuffer;
private ByteBufferWrapper myLastBuffer2;
private ByteBufferWrapper myLastBuffer3;
private int myLastChangeCount;
private int myLastChangeCount2;
private int myLastChangeCount3;
private int myStorageIndex;
private static final int MAX_PAGES_COUNT = 0xFFFF;
private static final int MAX_LIVE_STORAGES_COUNT = 0xFFFF;
public void lock() {
myStorageLockContext.myLock.lock();
}
public void unlock() {
myStorageLockContext.myLock.unlock();
}
public StorageLockContext getStorageLockContext() {
return myStorageLockContext;
}
private final byte[] myTypedIOBuffer;
private volatile boolean isDirty = false;
private final File myFile;
protected long mySize = -1;
protected final int myPageSize;
protected final boolean myValuesAreBufferAligned;
@NonNls private static final String RW = "rw";
public PagedFileStorage(File file, StorageLock lock, int pageSize, boolean valuesAreBufferAligned) throws IOException {
this(file, lock.myDefaultStorageLockContext, pageSize, valuesAreBufferAligned);
}
public PagedFileStorage(File file, StorageLockContext storageLockContext, int pageSize, boolean valuesAreBufferAligned) throws IOException {
myFile = file;
myStorageLockContext = storageLockContext;
myPageSize = Math.max(pageSize > 0 ? pageSize : BUFFER_SIZE, Page.PAGE_SIZE);
myValuesAreBufferAligned = valuesAreBufferAligned;
myStorageIndex = storageLockContext.myStorageLock.registerPagedFileStorage(this);
myTypedIOBuffer = valuesAreBufferAligned ? null:new byte[8];
}
public PagedFileStorage(File file, StorageLock lock) throws IOException {
this(file, lock, BUFFER_SIZE, false);
}
public File getFile() {
return myFile;
}
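  /*
   * Hedged usage sketch (added commentary, not part of the original class):
   * a typical guarded read/write cycle. The file "f" is an assumption.
   */
  static long sketchReadWrite(File f) throws IOException {
    StorageLock lock = new StorageLock();
    PagedFileStorage storage = new PagedFileStorage(f, lock);
    lock.lock();                  // all access must hold the storage lock
    try {
      storage.resize(16);         // room for two longs
      storage.putLong(0, 42L);    // write goes through the page cache
      return storage.getLong(0);  // read hits the same cached page
    }
    finally {
      storage.close();            // flush and unmap while still holding the lock
      lock.unlock();
    }
  }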
public void putInt(int addr, int value) {
if (myValuesAreBufferAligned) {
int page = addr / myPageSize;
int page_offset = addr % myPageSize;
getBuffer(page).putInt(page_offset, value);
} else {
Bits.putInt(myTypedIOBuffer, 0, value);
put(addr, myTypedIOBuffer, 0, 4);
}
}
public int getInt(int addr) {
if (myValuesAreBufferAligned) {
int page = addr / myPageSize;
int page_offset = addr % myPageSize;
return getBuffer(page, false).getInt(page_offset);
} else {
get(addr, myTypedIOBuffer, 0, 4);
return Bits.getInt(myTypedIOBuffer, 0);
}
}
public final void putShort(int addr, short value) {
if (myValuesAreBufferAligned) {
int page = addr / myPageSize;
int page_offset = addr % myPageSize;
getBuffer(page).putShort(page_offset, value);
} else {
Bits.putShort(myTypedIOBuffer, 0, value);
put(addr, myTypedIOBuffer, 0, 2);
}
}
int getOffsetInPage(int addr) {
return addr % myPageSize;
}
ByteBuffer getByteBuffer(int address, boolean modify) {
return getBuffer(address / myPageSize, modify);
}
public final short getShort(int addr) {
if (myValuesAreBufferAligned) {
int page = addr / myPageSize;
int page_offset = addr % myPageSize;
return getBuffer(page, false).getShort(page_offset);
} else {
get(addr, myTypedIOBuffer, 0, 2);
return Bits.getShort(myTypedIOBuffer, 0);
}
}
public void putLong(int addr, long value) {
if (myValuesAreBufferAligned) {
int page = addr / myPageSize;
int page_offset = addr % myPageSize;
getBuffer(page).putLong(page_offset, value);
} else {
Bits.putLong(myTypedIOBuffer, 0, value);
put(addr, myTypedIOBuffer, 0, 8);
}
}
@SuppressWarnings({"UnusedDeclaration"})
public void putByte(final int addr, final byte b) {
put(addr, b);
}
public byte getByte(int addr) {
return get(addr);
}
public long getLong(int addr) {
if (myValuesAreBufferAligned) {
int page = addr / myPageSize;
int page_offset = addr % myPageSize;
return getBuffer(page, false).getLong(page_offset);
} else {
get(addr, myTypedIOBuffer, 0, 8);
return Bits.getLong(myTypedIOBuffer, 0);
}
}
public byte get(int index) {
int page = index / myPageSize;
int offset = index % myPageSize;
return getBuffer(page, false).get(offset);
}
public void put(int index, byte value) {
int page = index / myPageSize;
int offset = index % myPageSize;
getBuffer(page).put(offset, value);
}
public void get(int index, byte[] dst, int offset, int length) {
int i = index;
int o = offset;
int l = length;
while (l > 0) {
int page = i / myPageSize;
int page_offset = i % myPageSize;
int page_len = Math.min(l, myPageSize - page_offset);
final ByteBuffer buffer = getBuffer(page, false);
try {
buffer.position(page_offset);
}
catch (IllegalArgumentException iae) {
throw new IllegalArgumentException("can't position buffer to offset " + page_offset + ", " +
"buffer.limit=" + buffer.limit() + ", " +
"page=" + page + ", " +
"file=" + myFile.getName() + ", "+
"file.length=" + mySize);
}
buffer.get(dst, o, page_len);
l -= page_len;
o += page_len;
i += page_len;
}
}
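  /*
   * Added commentary: the loop above splits one logical read across page
   * boundaries. E.g. with myPageSize = 10, get(7, dst, 0, 25) touches
   * pages 0..3 with chunk lengths 3, 10, 10 and 2.
   */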
public void put(int index, byte[] src, int offset, int length) {
int i = index;
int o = offset;
int l = length;
while (l > 0) {
int page = i / myPageSize;
int page_offset = i % myPageSize;
int page_len = Math.min(l, myPageSize - page_offset);
final ByteBuffer buffer = getBuffer(page);
try {
buffer.position(page_offset);
}
catch (IllegalArgumentException iae) {
throw new IllegalArgumentException("can't position buffer to offset " + page_offset);
}
buffer.put(src, o, page_len);
l -= page_len;
o += page_len;
i += page_len;
}
}
public void close() {
try {
force();
}
finally {
unmapAll();
myStorageLockContext.myStorageLock.myIndex2Storage.remove(myStorageIndex);
myStorageIndex = -1;
}
}
private void unmapAll() {
myStorageLockContext.myStorageLock.unmapBuffersForOwner(myStorageIndex, myStorageLockContext);
myLastPage = UNKNOWN_PAGE;
myLastPage2 = UNKNOWN_PAGE;
myLastPage3 = UNKNOWN_PAGE;
myLastBuffer = null;
myLastBuffer2 = null;
myLastBuffer3 = null;
}
public void resize(int newSize) throws IOException {
int oldSize = (int)myFile.length();
if (oldSize == newSize) return;
final long started = IOStatistics.DEBUG ? System.currentTimeMillis():0;
myStorageLockContext.myStorageLock.invalidateBuffer((int)(myStorageIndex | (mySize / myPageSize)));
    //unmapAll(); // we do not need it since all page aligned buffers can be reused
final long unmapAllFinished = IOStatistics.DEBUG ? System.currentTimeMillis():0;
resizeFile(newSize);
    // it is not guaranteed that the newly added part of the file will be zero-filled
    // after resize, so we should fill it manually
int delta = newSize - oldSize;
if (delta > 0) fillWithZeros(oldSize, delta);
if (IOStatistics.DEBUG) {
long finished = System.currentTimeMillis();
if (finished - started > IOStatistics.MIN_IO_TIME_TO_REPORT) {
IOStatistics.dump("Resized "+myFile + " from " + oldSize + " to " + newSize + " for " + (finished - started) + ", unmap all:" + (finished - unmapAllFinished));
}
}
}
private void resizeFile(int newSize) throws IOException {
RandomAccessFile raf = new RandomAccessFile(myFile, RW);
try {
raf.setLength(newSize);
}
finally {
raf.close();
}
mySize = newSize;
}
private final static int MAX_FILLER_SIZE = 8192;
private void fillWithZeros(int from, int length) {
byte[] buff = new byte[MAX_FILLER_SIZE];
Arrays.fill(buff, (byte)0);
while (length > 0) {
final int filled = Math.min(length, MAX_FILLER_SIZE);
put(from, buff, 0, filled);
length -= filled;
from += filled;
}
}
public final long length() {
if (mySize == -1) {
mySize = myFile.length();
}
return mySize;
}
private ByteBuffer getBuffer(int page) {
return getBuffer(page, true);
}
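  /*
   * Added commentary: the three myLastPage/myLastBuffer slots below form a
   * tiny MRU cache in front of the shared StorageLock page map; a hit is
   * revalidated against myMappingChangeCount in case the page was remapped.
   */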
private ByteBuffer getBuffer(int page, boolean modify) {
if (myLastPage == page) {
ByteBuffer buf = myLastBuffer.getCachedBuffer();
if (buf != null && myLastChangeCount == myStorageLockContext.myStorageLock.myMappingChangeCount) {
if (modify) markDirty(myLastBuffer);
return buf;
}
} else if (myLastPage2 == page) {
ByteBuffer buf = myLastBuffer2.getCachedBuffer();
if (buf != null && myLastChangeCount2 == myStorageLockContext.myStorageLock.myMappingChangeCount) {
if (modify) markDirty(myLastBuffer2);
return buf;
}
} else if (myLastPage3 == page) {
ByteBuffer buf = myLastBuffer3.getCachedBuffer();
if (buf != null && myLastChangeCount3 == myStorageLockContext.myStorageLock.myMappingChangeCount) {
if (modify) markDirty(myLastBuffer3);
return buf;
}
}
try {
assert page <= MAX_PAGES_COUNT;
if (myStorageIndex == -1) {
myStorageIndex = myStorageLockContext.myStorageLock.registerPagedFileStorage(this);
}
ByteBufferWrapper byteBufferWrapper = myStorageLockContext.myStorageLock.get(myStorageIndex | page);
if (modify) markDirty(byteBufferWrapper);
ByteBuffer buf = byteBufferWrapper.getBuffer();
if (myLastPage != page) {
myLastPage3 = myLastPage2;
myLastBuffer3 = myLastBuffer2;
myLastChangeCount3 = myLastChangeCount2;
myLastPage2 = myLastPage;
myLastBuffer2 = myLastBuffer;
myLastChangeCount2 = myLastChangeCount;
myLastBuffer = byteBufferWrapper;
myLastPage = page;
} else {
myLastBuffer = byteBufferWrapper;
}
myLastChangeCount = myStorageLockContext.myStorageLock.myMappingChangeCount;
return buf;
}
catch (IOException e) {
throw new MappingFailedException("Cannot map buffer", e);
}
}
private void markDirty(ByteBufferWrapper buffer) {
if (!isDirty) isDirty = true;
buffer.markDirty();
}
public void force() {
long started = IOStatistics.DEBUG ? System.currentTimeMillis():0;
if (isDirty) {
myStorageLockContext.myStorageLock.flushBuffersForOwner(myStorageIndex, myStorageLockContext);
isDirty = false;
}
if (IOStatistics.DEBUG) {
long finished = System.currentTimeMillis();
if (finished - started > IOStatistics.MIN_IO_TIME_TO_REPORT) {
IOStatistics.dump("Flushed "+myFile + " for " + (finished - started));
}
}
}
public boolean isDirty() {
return isDirty;
}
public static class StorageLock {
private static final int FILE_INDEX_MASK = 0xFFFF0000;
private static final int FILE_INDEX_SHIFT = 16;
private final boolean checkThreadAccess;
public final StorageLockContext myDefaultStorageLockContext;
private final ConcurrentHashMap<Integer, PagedFileStorage> myIndex2Storage = new ConcurrentHashMap<Integer, PagedFileStorage>();
private final LinkedHashMap<Integer, ByteBufferWrapper> mySegments;
private final SequenceLock mySegmentsAccessLock = new SequenceLock(); // protects map operations of mySegments, needed for LRU order, mySize and myMappingChangeCount
private final SequenceLock mySegmentsAllocationLock = new SequenceLock();
private final ConcurrentLinkedQueue<ByteBufferWrapper> mySegmentsToRemove = new ConcurrentLinkedQueue<ByteBufferWrapper>();
private volatile long mySize;
private volatile long mySizeLimit;
private volatile int myMappingChangeCount;
public StorageLock() {
this(true);
}
public StorageLock(boolean checkThreadAccess) {
this.checkThreadAccess = checkThreadAccess;
myDefaultStorageLockContext = new StorageLockContext(this);
mySizeLimit = UPPER_LIMIT;
mySegments = new LinkedHashMap<Integer, ByteBufferWrapper>(10, 0.75f) {
@Override
protected boolean removeEldestEntry(Map.Entry<Integer, ByteBufferWrapper> eldest) {
return mySize > mySizeLimit;
}
@Nullable
@Override
public ByteBufferWrapper remove(Object key) {
// this method can be called after removeEldestEntry
ByteBufferWrapper wrapper = super.remove(key);
if (wrapper != null) {
++myMappingChangeCount;
mySegmentsToRemove.offer(wrapper);
mySize -= wrapper.myLength;
}
return wrapper;
}
};
}
public void lock() {
myDefaultStorageLockContext.myLock.lock();
}
public void unlock() {
myDefaultStorageLockContext.myLock.unlock();
}
private int registerPagedFileStorage(PagedFileStorage storage) {
int registered = myIndex2Storage.size();
assert registered <= MAX_LIVE_STORAGES_COUNT;
int value = registered << FILE_INDEX_SHIFT;
while(myIndex2Storage.putIfAbsent(value, storage) != null) {
++registered;
assert registered <= MAX_LIVE_STORAGES_COUNT;
value = registered << FILE_INDEX_SHIFT;
}
return value;
}
private PagedFileStorage getRegisteredPagedFileStorageByIndex(int index) {
return myIndex2Storage.get(index);
}
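    /*
     * Added commentary: get() below uses a double-checked, two-lock scheme.
     * mySegmentsAccessLock guards the LRU map for cheap cache hits, while
     * mySegmentsAllocationLock serializes the slow mapping path so the same
     * page is never mapped twice concurrently.
     */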
private ByteBufferWrapper get(Integer key) {
ByteBufferWrapper wrapper;
try { // fast path
mySegmentsAccessLock.lock();
wrapper = mySegments.get(key);
if (wrapper != null) return wrapper;
}
finally {
mySegmentsAccessLock.unlock();
}
mySegmentsAllocationLock.lock();
try {
      // check if somebody else mapped our segment while we waited for the allocation lock
mySegmentsAccessLock.lock();
try {
wrapper = mySegments.get(key);
if (wrapper != null) return wrapper;
} finally {
mySegmentsAccessLock.unlock();
}
long started = IOStatistics.DEBUG ? System.currentTimeMillis() : 0;
wrapper = createValue(key);
if (IOStatistics.DEBUG) {
long finished = System.currentTimeMillis();
if (finished - started > IOStatistics.MIN_IO_TIME_TO_REPORT) {
IOStatistics.dump(
"Mapping " + wrapper.myLength + " from " + wrapper.myPosition + " file:" + wrapper.myFile + " for " + (finished - started));
}
}
mySegmentsAccessLock.lock();
try {
mySegments.put(key, wrapper);
mySize += wrapper.myLength;
}
finally {
mySegmentsAccessLock.unlock();
}
ensureSize(mySizeLimit);
return wrapper;
}
finally {
mySegmentsAllocationLock.unlock();
}
}
private void disposeRemovedSegments() {
assert mySegmentsAllocationLock.isHeldByCurrentThread();
Iterator<ByteBufferWrapper> iterator = mySegmentsToRemove.iterator();
while(iterator.hasNext()) {
iterator.next().dispose();
iterator.remove();
}
}
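    /*
     * Added commentary: ensureSize() below evicts pages in LRU order until
     * the cached byte total drops under the limit, then disposes the evicted
     * buffers once the access lock has been released.
     */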
private void ensureSize(long sizeLimit) {
assert mySegmentsAllocationLock.isHeldByCurrentThread();
try {
mySegmentsAccessLock.lock();
while (mySize > sizeLimit) {
// we still have to drop something
mySegments.doRemoveEldestEntry();
}
} finally {
mySegmentsAccessLock.unlock();
}
disposeRemovedSegments();
}
@NotNull
private ByteBufferWrapper createValue(Integer key) {
final int storageIndex = key & FILE_INDEX_MASK;
PagedFileStorage owner = getRegisteredPagedFileStorageByIndex(storageIndex);
assert owner != null: "No storage for index " + storageIndex;
checkThreadAccess(owner.myStorageLockContext);
int off = (key & MAX_PAGES_COUNT) * owner.myPageSize;
if (off > owner.length()) {
throw new IndexOutOfBoundsException("off=" + off + " key.owner.length()=" + owner.length());
}
int min = Math.min((int)(owner.length() - off), owner.myPageSize);
ByteBufferWrapper wrapper = ByteBufferWrapper.readWriteDirect(owner.myFile, off, min);
Throwable oome = null;
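      // Added commentary: the retry loop below recovers from OutOfMemoryError
      // by shrinking the cache limit, evicting one page worth of buffers, and
      // attempting the mapping again before finally giving up.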
while (true) {
try {
// ensure it's allocated
wrapper.getBuffer();
if (oome != null) {
LOG.info("Successfully recovered OOME in memory mapping: -Xmx=" + Runtime.getRuntime().maxMemory() / MB + "MB " +
"new size limit: " + mySizeLimit / MB + "MB " +
"trying to allocate " + wrapper.myLength + " block");
}
return wrapper;
}
catch (IOException e) {
throw new MappingFailedException("Cannot map buffer", e);
} catch (OutOfMemoryError e) {
oome = e;
if (mySizeLimit > LOWER_LIMIT) {
mySizeLimit -= owner.myPageSize;
}
long newSize = mySize - owner.myPageSize;
if (newSize >= 0) {
ensureSize(newSize);
continue; // next try
}
else {
throw new MappingFailedException(
"Cannot recover from OOME in memory mapping: -Xmx=" + Runtime.getRuntime().maxMemory() / MB + "MB " +
"new size limit: " + mySizeLimit / MB + "MB " +
"trying to allocate " + wrapper.myLength + " block", e);
}
}
}
}
private void checkThreadAccess(StorageLockContext storageLockContext) {
if (checkThreadAccess && !storageLockContext.myLock.isHeldByCurrentThread()) {
throw new IllegalStateException("Must hold StorageLock lock to access PagedFileStorage");
}
}
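// Collects the cached segments belonging to the given storage, ordered by page key, or null if none are cached.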
private @Nullable Map<Integer, ByteBufferWrapper> getBuffersOrderedForOwner(int index, StorageLockContext storageLockContext) {
mySegmentsAccessLock.lock();
try {
checkThreadAccess(storageLockContext);
Map<Integer, ByteBufferWrapper> mineBuffers = null;
for (Map.Entry<Integer, ByteBufferWrapper> entry : mySegments.entrySet()) {
if ((entry.getKey() & FILE_INDEX_MASK) == index) {
if (mineBuffers == null) {
mineBuffers = new TreeMap<Integer, ByteBufferWrapper>(new Comparator<Integer>() {
@Override
public int compare(Integer o1, Integer o2) {
return o1 - o2;
}
});
}
mineBuffers.put(entry.getKey(), entry.getValue());
}
}
return mineBuffers;
}
finally {
mySegmentsAccessLock.unlock();
}
}
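// Removes all of a storage's segments from the cache and disposes them, e.g. when the storage is closed.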
private void unmapBuffersForOwner(int index, StorageLockContext storageLockContext) {
final Map<Integer, ByteBufferWrapper> buffers = getBuffersOrderedForOwner(index, storageLockContext);
if (buffers != null) {
mySegmentsAccessLock.lock();
try {
for (Integer key : buffers.keySet()) {
mySegments.remove(key);
}
}
finally {
mySegmentsAccessLock.unlock();
}
mySegmentsAllocationLock.lock();
try {
disposeRemovedSegments();
} finally {
mySegmentsAllocationLock.unlock();
}
}
}
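// Flushes every cached segment of the given storage to disk.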
private void flushBuffersForOwner(int index, StorageLockContext storageLockContext) {
Map<Integer, ByteBufferWrapper> buffers = getBuffersOrderedForOwner(index, storageLockContext);
if (buffers != null) {
mySegmentsAllocationLock.lock();
try {
for(ByteBufferWrapper buffer:buffers.values()) {
buffer.flush();
}
}
finally {
mySegmentsAllocationLock.unlock();
}
}
}
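// Drops a single page from the cache and disposes its buffer, e.g. after the backing file is resized.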
public void invalidateBuffer(int page) {
mySegmentsAccessLock.lock();
try {
mySegments.remove(page);
} finally {
mySegmentsAccessLock.unlock();
}
mySegmentsAllocationLock.lock();
try {
disposeRemovedSegments();
}
finally {
mySegmentsAllocationLock.unlock();
}
}
}
public static class StorageLockContext {
private final SequenceLock myLock;
private final StorageLock myStorageLock;
public StorageLockContext(StorageLock lock) {
myLock = new SequenceLock();
myStorageLock = lock;
}
}
}
| platform/util/src/com/intellij/util/io/PagedFileStorage.java | /*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util.io;
import com.intellij.openapi.Forceable;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.util.SystemProperties;
import com.intellij.util.containers.hash.LinkedHashMap;
import jsr166e.SequenceLock;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import sun.misc.VM;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
/**
* @author max
*/
public class PagedFileStorage implements Forceable {
protected static final Logger LOG = Logger.getInstance("#com.intellij.util.io.PagedFileStorage");
public static final int MB = 1024 * 1024;
private final static int LOWER_LIMIT;
private final static int UPPER_LIMIT;
public final static int BUFFER_SIZE;
private static final int UNKNOWN_PAGE = -1;
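// Computes cache sizing: the page size plus lower/upper bounds for the shared buffer cache, additionally capped by available direct memory when memory mapping is disabled.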
static {
final int lower = 100;
final int upper = SystemInfo.is64Bit && !PersistentEnumeratorDelegate.useBtree() ? 500 : 200;
BUFFER_SIZE = Math.max(1, SystemProperties.getIntProperty("idea.paged.storage.page.size", 10)) * MB;
if (ByteBufferWrapper.NO_MMAP) {
final long max = VM.maxDirectMemory() - 2 * BUFFER_SIZE;
LOWER_LIMIT = (int)Math.min(lower * MB, max);
UPPER_LIMIT = (int)Math.min(Math.max(LOWER_LIMIT, SystemProperties.getIntProperty("idea.max.paged.storage.cache", upper) * MB), max);
}
else {
LOWER_LIMIT = lower * MB;
UPPER_LIMIT = Math.max(LOWER_LIMIT, SystemProperties.getIntProperty("idea.max.paged.storage.cache", upper) * MB);
}
LOG.info("lower=" + (LOWER_LIMIT / MB) +
"; upper=" + (UPPER_LIMIT / MB) +
"; buffer=" + (BUFFER_SIZE / MB) +
"; mmap=" + (!ByteBufferWrapper.NO_MMAP));
}
private final StorageLockContext myStorageLockContext;
private int myLastPage = UNKNOWN_PAGE;
private int myLastPage2 = UNKNOWN_PAGE;
private int myLastPage3 = UNKNOWN_PAGE;
private ByteBufferWrapper myLastBuffer;
private ByteBufferWrapper myLastBuffer2;
private ByteBufferWrapper myLastBuffer3;
private int myLastChangeCount;
private int myLastChangeCount2;
private int myLastChangeCount3;
private int myStorageIndex;
private static final int MAX_PAGES_COUNT = 0xFFFF;
private static final int MAX_LIVE_STORAGES_COUNT = 0xFFFF;
public void lock() {
myStorageLockContext.myLock.lock();
}
public void unlock() {
myStorageLockContext.myLock.unlock();
}
public StorageLockContext getStorageLockContext() {
return myStorageLockContext;
}
private final byte[] myTypedIOBuffer;
private boolean isDirty = false;
private final File myFile;
protected long mySize = -1;
protected final int myPageSize;
protected final boolean myValuesAreBufferAligned;
@NonNls private static final String RW = "rw";
public PagedFileStorage(File file, StorageLock lock, int pageSize, boolean valuesAreBufferAligned) throws IOException {
this(file, lock.myDefaultStorageLockContext, pageSize, valuesAreBufferAligned);
}
public PagedFileStorage(File file, StorageLockContext storageLockContext, int pageSize, boolean valuesAreBufferAligned) throws IOException {
myFile = file;
myStorageLockContext = storageLockContext;
myPageSize = Math.max(pageSize > 0 ? pageSize : BUFFER_SIZE, Page.PAGE_SIZE);
myValuesAreBufferAligned = valuesAreBufferAligned;
myStorageIndex = storageLockContext.myStorageLock.registerPagedFileStorage(this);
myTypedIOBuffer = valuesAreBufferAligned ? null:new byte[8];
}
public PagedFileStorage(File file, StorageLock lock) throws IOException {
this(file, lock, BUFFER_SIZE, false);
}
public File getFile() {
return myFile;
}
public void putInt(int addr, int value) {
if (myValuesAreBufferAligned) {
isDirty = true;
int page = addr / myPageSize;
int page_offset = addr % myPageSize;
getBuffer(page).putInt(page_offset, value);
} else {
Bits.putInt(myTypedIOBuffer, 0, value);
put(addr, myTypedIOBuffer, 0, 4);
}
}
public int getInt(int addr) {
if (myValuesAreBufferAligned) {
int page = addr / myPageSize;
int page_offset = addr % myPageSize;
return getBuffer(page, false).getInt(page_offset);
} else {
get(addr, myTypedIOBuffer, 0, 4);
return Bits.getInt(myTypedIOBuffer, 0);
}
}
public final void putShort(int addr, short value) {
if (myValuesAreBufferAligned) {
isDirty = true;
int page = addr / myPageSize;
int page_offset = addr % myPageSize;
getBuffer(page).putShort(page_offset, value);
} else {
Bits.putShort(myTypedIOBuffer, 0, value);
put(addr, myTypedIOBuffer, 0, 2);
}
}
int getOffsetInPage(int addr) {
return addr % myPageSize;
}
ByteBuffer getByteBuffer(int address, boolean modify) {
return getBuffer(address / myPageSize, modify);
}
public final short getShort(int addr) {
if (myValuesAreBufferAligned) {
int page = addr / myPageSize;
int page_offset = addr % myPageSize;
return getBuffer(page, false).getShort(page_offset);
} else {
get(addr, myTypedIOBuffer, 0, 2);
return Bits.getShort(myTypedIOBuffer, 0);
}
}
public void putLong(int addr, long value) {
if (myValuesAreBufferAligned) {
isDirty = true;
int page = addr / myPageSize;
int page_offset = addr % myPageSize;
getBuffer(page).putLong(page_offset, value);
} else {
Bits.putLong(myTypedIOBuffer, 0, value);
put(addr, myTypedIOBuffer, 0, 8);
}
}
@SuppressWarnings({"UnusedDeclaration"})
public void putByte(final int addr, final byte b) {
put(addr, b);
}
public byte getByte(int addr) {
return get(addr);
}
public long getLong(int addr) {
if (myValuesAreBufferAligned) {
int page = addr / myPageSize;
int page_offset = addr % myPageSize;
return getBuffer(page, false).getLong(page_offset);
} else {
get(addr, myTypedIOBuffer, 0, 8);
return Bits.getLong(myTypedIOBuffer, 0);
}
}
public byte get(int index) {
int page = index / myPageSize;
int offset = index % myPageSize;
return getBuffer(page, false).get(offset);
}
public void put(int index, byte value) {
isDirty = true;
int page = index / myPageSize;
int offset = index % myPageSize;
getBuffer(page).put(offset, value);
}
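// Bulk read that may span several pages; copies page-sized chunks until the requested length is read.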
public void get(int index, byte[] dst, int offset, int length) {
int i = index;
int o = offset;
int l = length;
while (l > 0) {
int page = i / myPageSize;
int page_offset = i % myPageSize;
int page_len = Math.min(l, myPageSize - page_offset);
final ByteBuffer buffer = getBuffer(page, false);
try {
buffer.position(page_offset);
}
catch (IllegalArgumentException iae) {
throw new IllegalArgumentException("can't position buffer to offset " + page_offset + ", " +
"buffer.limit=" + buffer.limit() + ", " +
"page=" + page + ", " +
"file=" + myFile.getName() + ", "+
"file.length=" + mySize);
}
buffer.get(dst, o, page_len);
l -= page_len;
o += page_len;
i += page_len;
}
}
public void put(int index, byte[] src, int offset, int length) {
isDirty = true;
int i = index;
int o = offset;
int l = length;
while (l > 0) {
int page = i / myPageSize;
int page_offset = i % myPageSize;
int page_len = Math.min(l, myPageSize - page_offset);
final ByteBuffer buffer = getBuffer(page);
try {
buffer.position(page_offset);
}
catch (IllegalArgumentException iae) {
throw new IllegalArgumentException("can't position buffer to offset " + page_offset);
}
buffer.put(src, o, page_len);
l -= page_len;
o += page_len;
i += page_len;
}
}
public void close() {
try {
force();
}
finally {
unmapAll();
myStorageLockContext.myStorageLock.myIndex2Storage.remove(myStorageIndex);
myStorageIndex = -1;
}
}
private void unmapAll() {
myStorageLockContext.myStorageLock.unmapBuffersForOwner(myStorageIndex, myStorageLockContext);
myLastPage = UNKNOWN_PAGE;
myLastPage2 = UNKNOWN_PAGE;
myLastPage3 = UNKNOWN_PAGE;
myLastBuffer = null;
myLastBuffer2 = null;
myLastBuffer3 = null;
}
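// Changes the backing file's length; any newly appended region is zero-filled explicitly.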
public void resize(int newSize) throws IOException {
int oldSize = (int)myFile.length();
if (oldSize == newSize) return;
final long started = IOStatistics.DEBUG ? System.currentTimeMillis():0;
myStorageLockContext.myStorageLock.invalidateBuffer((int)(myStorageIndex | (mySize / myPageSize)));
//unmapAll(); // not needed, since all page-aligned buffers can be reused
final long unmapAllFinished = IOStatistics.DEBUG ? System.currentTimeMillis():0;
resizeFile(newSize);
// the region appended by the resize is not guaranteed to be zero-filled,
// so we fill it with zeros manually
int delta = newSize - oldSize;
if (delta > 0) fillWithZeros(oldSize, delta);
if (IOStatistics.DEBUG) {
long finished = System.currentTimeMillis();
if (finished - started > IOStatistics.MIN_IO_TIME_TO_REPORT) {
IOStatistics.dump("Resized "+myFile + " from " + oldSize + " to " + newSize + " for " + (finished - started) + ", unmap all:" + (finished - unmapAllFinished));
}
}
}
private void resizeFile(int newSize) throws IOException {
RandomAccessFile raf = new RandomAccessFile(myFile, RW);
try {
raf.setLength(newSize);
}
finally {
raf.close();
}
mySize = newSize;
}
private final static int MAX_FILLER_SIZE = 8192;
private void fillWithZeros(int from, int length) {
byte[] buff = new byte[MAX_FILLER_SIZE];
Arrays.fill(buff, (byte)0);
while (length > 0) {
final int filled = Math.min(length, MAX_FILLER_SIZE);
put(from, buff, 0, filled);
length -= filled;
from += filled;
}
}
public final long length() {
if (mySize == -1) {
mySize = myFile.length();
}
return mySize;
}
private ByteBuffer getBuffer(int page) {
return getBuffer(page, true);
}
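// Checks a three-slot MRU cache of recently used pages first; falls back to the shared StorageLock cache on a miss or when the global mapping generation has changed.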
private ByteBuffer getBuffer(int page, boolean modify) {
if (myLastPage == page) {
ByteBuffer buf = myLastBuffer.getCachedBuffer();
if (buf != null && myLastChangeCount == myStorageLockContext.myStorageLock.myMappingChangeCount) {
if (modify) myLastBuffer.markDirty();
return buf;
}
} else if (myLastPage2 == page) {
ByteBuffer buf = myLastBuffer2.getCachedBuffer();
if (buf != null && myLastChangeCount2 == myStorageLockContext.myStorageLock.myMappingChangeCount) {
if (modify) myLastBuffer2.markDirty();
return buf;
}
} else if (myLastPage3 == page) {
ByteBuffer buf = myLastBuffer3.getCachedBuffer();
if (buf != null && myLastChangeCount3 == myStorageLockContext.myStorageLock.myMappingChangeCount) {
if (modify) myLastBuffer3.markDirty();
return buf;
}
}
try {
assert page <= MAX_PAGES_COUNT;
if (myStorageIndex == -1) {
myStorageIndex = myStorageLockContext.myStorageLock.registerPagedFileStorage(this);
}
ByteBufferWrapper byteBufferWrapper = myStorageLockContext.myStorageLock.get(myStorageIndex | page);
if (modify) byteBufferWrapper.markDirty();
ByteBuffer buf = byteBufferWrapper.getBuffer();
if (myLastPage != page) {
myLastPage3 = myLastPage2;
myLastBuffer3 = myLastBuffer2;
myLastChangeCount3 = myLastChangeCount2;
myLastPage2 = myLastPage;
myLastBuffer2 = myLastBuffer;
myLastChangeCount2 = myLastChangeCount;
myLastBuffer = byteBufferWrapper;
myLastPage = page;
} else {
myLastBuffer = byteBufferWrapper;
}
myLastChangeCount = myStorageLockContext.myStorageLock.myMappingChangeCount;
return buf;
}
catch (IOException e) {
throw new MappingFailedException("Cannot map buffer", e);
}
}
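// Flushes this storage's dirty buffers to disk and clears the dirty flag.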
public void force() {
long started = IOStatistics.DEBUG ? System.currentTimeMillis():0;
if (isDirty) {
myStorageLockContext.myStorageLock.flushBuffersForOwner(myStorageIndex, myStorageLockContext);
isDirty = false;
}
if (IOStatistics.DEBUG) {
long finished = System.currentTimeMillis();
if (finished - started > IOStatistics.MIN_IO_TIME_TO_REPORT) {
IOStatistics.dump("Flushed "+myFile + " for " + (finished - started));
}
}
}
public boolean isDirty() {
return isDirty;
}
public static class StorageLock {
private static final int FILE_INDEX_MASK = 0xFFFF0000;
private static final int FILE_INDEX_SHIFT = 16;
private final boolean checkThreadAccess;
public final StorageLockContext myDefaultStorageLockContext;
private final ConcurrentHashMap<Integer, PagedFileStorage> myIndex2Storage = new ConcurrentHashMap<Integer, PagedFileStorage>();
private final LinkedHashMap<Integer, ByteBufferWrapper> mySegments;
private final SequenceLock mySegmentsAccessLock = new SequenceLock(); // protects map operations of mySegments, needed for LRU order, mySize and myMappingChangeCount
private final SequenceLock mySegmentsAllocationLock = new SequenceLock();
private final ConcurrentLinkedQueue<ByteBufferWrapper> mySegmentsToRemove = new ConcurrentLinkedQueue<ByteBufferWrapper>();
private volatile long mySize;
private volatile long mySizeLimit;
private volatile int myMappingChangeCount;
public StorageLock() {
this(true);
}
public StorageLock(boolean checkThreadAccess) {
this.checkThreadAccess = checkThreadAccess;
myDefaultStorageLockContext = new StorageLockContext(this);
mySizeLimit = UPPER_LIMIT;
mySegments = new LinkedHashMap<Integer, ByteBufferWrapper>(10, 0.75f) {
@Override
protected boolean removeEldestEntry(Map.Entry<Integer, ByteBufferWrapper> eldest) {
return mySize > mySizeLimit;
}
@Nullable
@Override
public ByteBufferWrapper remove(Object key) {
// this method can be called after removeEldestEntry
ByteBufferWrapper wrapper = super.remove(key);
if (wrapper != null) {
++myMappingChangeCount;
mySegmentsToRemove.offer(wrapper);
mySize -= wrapper.myLength;
}
return wrapper;
}
};
}
public void lock() {
myDefaultStorageLockContext.myLock.lock();
}
public void unlock() {
myDefaultStorageLockContext.myLock.unlock();
}
private int registerPagedFileStorage(PagedFileStorage storage) {
int registered = myIndex2Storage.size();
assert registered <= MAX_LIVE_STORAGES_COUNT;
int value = registered << FILE_INDEX_SHIFT;
while(myIndex2Storage.putIfAbsent(value, storage) != null) {
++registered;
assert registered <= MAX_LIVE_STORAGES_COUNT;
value = registered << FILE_INDEX_SHIFT;
}
return value;
}
private PagedFileStorage getRegisteredPagedFileStorageByIndex(int index) {
return myIndex2Storage.get(index);
}
private ByteBufferWrapper get(Integer key) {
ByteBufferWrapper wrapper;
try { // fast path
mySegmentsAccessLock.lock();
wrapper = mySegments.get(key);
if (wrapper != null) return wrapper;
}
finally {
mySegmentsAccessLock.unlock();
}
mySegmentsAllocationLock.lock();
try {
// re-check: another thread may have mapped our segment while we waited for the allocation lock
mySegmentsAccessLock.lock();
try {
wrapper = mySegments.get(key);
if (wrapper != null) return wrapper;
} finally {
mySegmentsAccessLock.unlock();
}
long started = IOStatistics.DEBUG ? System.currentTimeMillis() : 0;
wrapper = createValue(key);
if (IOStatistics.DEBUG) {
long finished = System.currentTimeMillis();
if (finished - started > IOStatistics.MIN_IO_TIME_TO_REPORT) {
IOStatistics.dump(
"Mapping " + wrapper.myLength + " from " + wrapper.myPosition + " file:" + wrapper.myFile + " for " + (finished - started));
}
}
mySegmentsAccessLock.lock();
try {
mySegments.put(key, wrapper);
mySize += wrapper.myLength;
}
finally {
mySegmentsAccessLock.unlock();
}
ensureSize(mySizeLimit);
return wrapper;
}
finally {
mySegmentsAllocationLock.unlock();
}
}
private void disposeRemovedSegments() {
assert mySegmentsAllocationLock.isHeldByCurrentThread();
Iterator<ByteBufferWrapper> iterator = mySegmentsToRemove.iterator();
while(iterator.hasNext()) {
iterator.next().dispose();
iterator.remove();
}
}
private void ensureSize(long sizeLimit) {
assert mySegmentsAllocationLock.isHeldByCurrentThread();
try {
mySegmentsAccessLock.lock();
while (mySize > sizeLimit) {
// still over the limit - evict the eldest entry
mySegments.doRemoveEldestEntry();
}
} finally {
mySegmentsAccessLock.unlock();
}
disposeRemovedSegments();
}
@NotNull
private ByteBufferWrapper createValue(Integer key) {
final int storageIndex = key & FILE_INDEX_MASK;
PagedFileStorage owner = getRegisteredPagedFileStorageByIndex(storageIndex);
assert owner != null: "No storage for index " + storageIndex;
checkThreadAccess(owner.myStorageLockContext);
int off = (key & MAX_PAGES_COUNT) * owner.myPageSize;
if (off > owner.length()) {
throw new IndexOutOfBoundsException("off=" + off + " key.owner.length()=" + owner.length());
}
int min = Math.min((int)(owner.length() - off), owner.myPageSize);
ByteBufferWrapper wrapper = ByteBufferWrapper.readWriteDirect(owner.myFile, off, min);
Throwable oome = null;
while (true) {
try {
// ensure it's allocated
wrapper.getBuffer();
if (oome != null) {
LOG.info("Successfully recovered OOME in memory mapping: -Xmx=" + Runtime.getRuntime().maxMemory() / MB + "MB " +
"new size limit: " + mySizeLimit / MB + "MB " +
"trying to allocate " + wrapper.myLength + " block");
}
return wrapper;
}
catch (IOException e) {
throw new MappingFailedException("Cannot map buffer", e);
} catch (OutOfMemoryError e) {
oome = e;
if (mySizeLimit > LOWER_LIMIT) {
mySizeLimit -= owner.myPageSize;
}
long newSize = mySize - owner.myPageSize;
if (newSize >= 0) {
ensureSize(newSize);
continue; // next try
}
else {
throw new MappingFailedException(
"Cannot recover from OOME in memory mapping: -Xmx=" + Runtime.getRuntime().maxMemory() / MB + "MB " +
"new size limit: " + mySizeLimit / MB + "MB " +
"trying to allocate " + wrapper.myLength + " block", e);
}
}
}
}
private void checkThreadAccess(StorageLockContext storageLockContext) {
if (checkThreadAccess && !storageLockContext.myLock.isHeldByCurrentThread()) {
throw new IllegalStateException("Must hold StorageLock lock to access PagedFileStorage");
}
}
private @Nullable Map<Integer, ByteBufferWrapper> getBuffersOrderedForOwner(int index, StorageLockContext storageLockContext) {
mySegmentsAccessLock.lock();
try {
checkThreadAccess(storageLockContext);
Map<Integer, ByteBufferWrapper> mineBuffers = null;
for (Map.Entry<Integer, ByteBufferWrapper> entry : mySegments.entrySet()) {
if ((entry.getKey() & FILE_INDEX_MASK) == index) {
if (mineBuffers == null) {
mineBuffers = new TreeMap<Integer, ByteBufferWrapper>(new Comparator<Integer>() {
@Override
public int compare(Integer o1, Integer o2) {
return o1 - o2;
}
});
}
mineBuffers.put(entry.getKey(), entry.getValue());
}
}
return mineBuffers;
}
finally {
mySegmentsAccessLock.unlock();
}
}
private void unmapBuffersForOwner(int index, StorageLockContext storageLockContext) {
final Map<Integer, ByteBufferWrapper> buffers = getBuffersOrderedForOwner(index, storageLockContext);
if (buffers != null) {
mySegmentsAccessLock.lock();
try {
for (Integer key : buffers.keySet()) {
mySegments.remove(key);
}
}
finally {
mySegmentsAccessLock.unlock();
}
mySegmentsAllocationLock.lock();
try {
disposeRemovedSegments();
} finally {
mySegmentsAllocationLock.unlock();
}
}
}
private void flushBuffersForOwner(int index, StorageLockContext storageLockContext) {
Map<Integer, ByteBufferWrapper> buffers = getBuffersOrderedForOwner(index, storageLockContext);
if (buffers != null) {
mySegmentsAllocationLock.lock();
try {
for(ByteBufferWrapper buffer:buffers.values()) {
buffer.flush();
}
}
finally {
mySegmentsAllocationLock.unlock();
}
}
}
public void invalidateBuffer(int page) {
mySegmentsAccessLock.lock();
try {
mySegments.remove(page);
} finally {
mySegmentsAccessLock.unlock();
}
mySegmentsAllocationLock.lock();
try {
disposeRemovedSegments();
}
finally {
mySegmentsAllocationLock.unlock();
}
}
}
public static class StorageLockContext {
private final SequenceLock myLock;
private final StorageLock myStorageLock;
public StorageLockContext(StorageLock lock) {
myLock = new SequenceLock();
myStorageLock = lock;
}
}
}
| Dirty flag made volatile; it is now set at the same time as the segment's dirty status
| platform/util/src/com/intellij/util/io/PagedFileStorage.java | Dirty flag made volatile; it is now set at the same time as the segment's dirty status | <ide><path>platform/util/src/com/intellij/util/io/PagedFileStorage.java
<ide> }
<ide>
<ide> private final byte[] myTypedIOBuffer;
<del> private boolean isDirty = false;
<add> private volatile boolean isDirty = false;
<ide> private final File myFile;
<ide> protected long mySize = -1;
<ide> protected final int myPageSize;
<ide>
<ide> public void putInt(int addr, int value) {
<ide> if (myValuesAreBufferAligned) {
<del> isDirty = true;
<ide> int page = addr / myPageSize;
<ide> int page_offset = addr % myPageSize;
<ide> getBuffer(page).putInt(page_offset, value);
<ide>
<ide> public final void putShort(int addr, short value) {
<ide> if (myValuesAreBufferAligned) {
<del> isDirty = true;
<ide> int page = addr / myPageSize;
<ide> int page_offset = addr % myPageSize;
<ide> getBuffer(page).putShort(page_offset, value);
<ide>
<ide> public void putLong(int addr, long value) {
<ide> if (myValuesAreBufferAligned) {
<del> isDirty = true;
<ide> int page = addr / myPageSize;
<ide> int page_offset = addr % myPageSize;
<ide> getBuffer(page).putLong(page_offset, value);
<ide> }
<ide>
<ide> public void put(int index, byte value) {
<del> isDirty = true;
<ide> int page = index / myPageSize;
<ide> int offset = index % myPageSize;
<ide>
<ide> }
<ide>
<ide> public void put(int index, byte[] src, int offset, int length) {
<del> isDirty = true;
<ide> int i = index;
<ide> int o = offset;
<ide> int l = length;
<ide> if (myLastPage == page) {
<ide> ByteBuffer buf = myLastBuffer.getCachedBuffer();
<ide> if (buf != null && myLastChangeCount == myStorageLockContext.myStorageLock.myMappingChangeCount) {
<del> if (modify) myLastBuffer.markDirty();
<add> if (modify) markDirty(myLastBuffer);
<ide> return buf;
<ide> }
<ide> } else if (myLastPage2 == page) {
<ide> ByteBuffer buf = myLastBuffer2.getCachedBuffer();
<ide> if (buf != null && myLastChangeCount2 == myStorageLockContext.myStorageLock.myMappingChangeCount) {
<del> if (modify) myLastBuffer2.markDirty();
<add> if (modify) markDirty(myLastBuffer2);
<ide> return buf;
<ide> }
<ide> } else if (myLastPage3 == page) {
<ide> ByteBuffer buf = myLastBuffer3.getCachedBuffer();
<ide> if (buf != null && myLastChangeCount3 == myStorageLockContext.myStorageLock.myMappingChangeCount) {
<del> if (modify) myLastBuffer3.markDirty();
<add> if (modify) markDirty(myLastBuffer3);
<ide> return buf;
<ide> }
<ide> }
<ide> myStorageIndex = myStorageLockContext.myStorageLock.registerPagedFileStorage(this);
<ide> }
<ide> ByteBufferWrapper byteBufferWrapper = myStorageLockContext.myStorageLock.get(myStorageIndex | page);
<del> if (modify) byteBufferWrapper.markDirty();
<add> if (modify) markDirty(byteBufferWrapper);
<ide> ByteBuffer buf = byteBufferWrapper.getBuffer();
<ide>
<ide> if (myLastPage != page) {
<ide> catch (IOException e) {
<ide> throw new MappingFailedException("Cannot map buffer", e);
<ide> }
<add> }
<add>
<add> private void markDirty(ByteBufferWrapper buffer) {
<add> if (!isDirty) isDirty = true;
<add> buffer.markDirty();
<ide> }
<ide>
<ide> public void force() { |
|
Java | apache-2.0 | 331b3d222492383358ed42c288c27053253d1123 | 0 | joshuairl/toothchat-client,joshuairl/toothchat-client,joshuairl/toothchat-client,joshuairl/toothchat-client | /**
* $Revision: $
* $Date: $
*
* Copyright (C) 2006 Jive Software. All rights reserved.
*
* This software is published under the terms of the GNU Lesser Public License (LGPL),
* a copy of which is included in this distribution.
*/
package org.jivesoftware.spark.ui.status;
import org.jivesoftware.resource.Default;
import org.jivesoftware.resource.Res;
import org.jivesoftware.resource.SparkRes;
import org.jivesoftware.smack.packet.PacketExtension;
import org.jivesoftware.smack.packet.Presence;
import org.jivesoftware.smackx.packet.VCard;
import org.jivesoftware.spark.SparkManager;
import org.jivesoftware.spark.ui.PresenceListener;
import org.jivesoftware.spark.util.GraphicUtils;
import org.jivesoftware.spark.util.ModelUtil;
import org.jivesoftware.spark.util.SwingWorker;
import org.jivesoftware.spark.util.log.Log;
import org.jivesoftware.sparkimpl.profile.VCardManager;
import java.awt.Color;
import java.awt.FlowLayout;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Image;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.geom.AffineTransform;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.BorderFactory;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.JLabel;
import javax.swing.JMenu;
import javax.swing.JMenuItem;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import javax.swing.SwingUtilities;
import javax.swing.border.Border;
public class StatusBar extends JPanel {
private List<StatusItem> statusList = new ArrayList<StatusItem>();
private JLabel imageLabel = new JLabel();
private JLabel nicknameLabel = new JLabel();
private StatusPanel statusPanel = new StatusPanel();
private Image backgroundImage;
private Presence currentPresence;
private JPanel commandPanel;
public StatusBar() {
setLayout(new GridBagLayout());
backgroundImage = Default.getImageIcon(Default.TOP_BOTTOM_BACKGROUND_IMAGE).getImage();
// Initialize the command panel
commandPanel = new JPanel(new FlowLayout(FlowLayout.LEFT));
commandPanel.setOpaque(false);
ImageIcon brandedImage = Default.getImageIcon(Default.BRANDED_IMAGE);
if (brandedImage != null && brandedImage.getIconWidth() > 1) {
final JLabel brandedLabel = new JLabel(brandedImage);
// brandedLabel.setBorder(new PartialLineBorder(Color.LIGHT_GRAY, 1));
add(brandedLabel, new GridBagConstraints(3, 0, 1, 3, 1.0, 0.0, GridBagConstraints.EAST, GridBagConstraints.NONE, new Insets(2, 2, 2, 2), 0, 0));
}
add(imageLabel, new GridBagConstraints(0, 0, 1, 4, 0.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(2, 2, 2, 2), 0, 0));
add(nicknameLabel, new GridBagConstraints(1, 0, 2, 2, 1.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(5, 5, 0, 0), 0, 0));
add(statusPanel, new GridBagConstraints(1, 2, 1, 1, 0.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(0, 2, 0, 0), 0, 0));
// Add Command Panel. We want adding command buttons to be simple.
add(commandPanel, new GridBagConstraints(1, 3, 3, 1, 1.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
nicknameLabel.setToolTipText(SparkManager.getConnection().getUser());
nicknameLabel.setFont(new Font("Dialog", Font.BOLD, 12));
populateDndList();
setStatus("Online");
currentPresence = new Presence(Presence.Type.available, "Online", -1, Presence.Mode.available);
setBorder(BorderFactory.createLineBorder(new Color(197, 213, 230), 1));
SparkManager.getSessionManager().addPresenceListener(new PresenceListener() {
public void presenceChanged(Presence presence) {
changeAvailability(presence);
}
});
// Show profile when the image label is clicked
imageLabel.addMouseListener(new MouseAdapter() {
public void mouseClicked(MouseEvent mouseEvent) {
if (mouseEvent.getClickCount() == 1) {
VCardManager vcardManager = SparkManager.getVCardManager();
vcardManager.showProfile(SparkManager.getWorkspace());
}
}
public void mouseEntered(MouseEvent e) {
imageLabel.setCursor(GraphicUtils.HAND_CURSOR);
}
public void mouseExited(MouseEvent e) {
imageLabel.setCursor(GraphicUtils.DEFAULT_CURSOR);
}
});
}
public void setAvatar(Icon icon) {
imageLabel.setIcon(icon);
invalidate();
validateTree();
}
public void setNickname(String nickname) {
nicknameLabel.setText(nickname);
}
/**
* Sets the current status text in the Status Manager.
*
* @param status the status to set.
*/
public void setStatus(String status) {
statusPanel.setStatus(status);
}
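// Builds and shows the status popup: one entry per base status, with custom messages nested as sub-menus under their base status.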
public void showPopup(MouseEvent e) {
final JPopupMenu popup = new JPopupMenu();
List custom = CustomMessages.load();
if (custom == null) {
custom = new ArrayList();
}
// Build menu from StatusList
for (final StatusItem statusItem : statusList) {
final Action statusAction = new AbstractAction() {
public void actionPerformed(ActionEvent actionEvent) {
final String text = statusItem.getText();
final StatusItem si = getStatusItem(text);
if (si == null) {
// Custom status
Log.error("Unable to find status item for status - " + text);
return;
}
SwingWorker worker = new SwingWorker() {
public Object construct() {
SparkManager.getSessionManager().changePresence(si.getPresence());
return "ok";
}
public void finished() {
setStatus(text);
}
};
worker.start();
}
};
statusAction.putValue(Action.NAME, statusItem.getText());
statusAction.putValue(Action.SMALL_ICON, statusItem.getIcon());
// Has Children
boolean hasChildren = false;
Iterator customItemIterator = custom.iterator();
while (customItemIterator.hasNext()) {
final CustomStatusItem cItem = (CustomStatusItem)customItemIterator.next();
String type = cItem.getType();
if (type.equals(statusItem.getText())) {
hasChildren = true;
}
}
if (!hasChildren) {
// Add as Menu Item
popup.add(statusAction);
}
else {
final JMenu mainStatusItem = new JMenu(statusAction);
popup.add(mainStatusItem);
// Add Custom Messages
customItemIterator = custom.iterator();
while (customItemIterator.hasNext()) {
final CustomStatusItem customItem = (CustomStatusItem)customItemIterator.next();
String type = customItem.getType();
if (type.equals(statusItem.getText())) {
// Add Child Menu
Action action = new AbstractAction() {
public void actionPerformed(ActionEvent actionEvent) {
final String text = mainStatusItem.getText();
final StatusItem si = getStatusItem(text);
if (si == null) {
// Custom status
Log.error("Unable to find status item for status - " + text);
return;
}
SwingWorker worker = new SwingWorker() {
public Object construct() {
Presence oldPresence = si.getPresence();
Presence presence = copyPresence(oldPresence);
presence.setStatus(customItem.getStatus());
presence.setPriority(customItem.getPriority());
SparkManager.getSessionManager().changePresence(presence);
return "ok";
}
public void finished() {
String status = customItem.getType() + " - " + customItem.getStatus();
setStatus(status);
}
};
worker.start();
}
};
action.putValue(Action.NAME, customItem.getStatus());
action.putValue(Action.SMALL_ICON, statusItem.getIcon());
mainStatusItem.add(action);
}
}
// If menu has children, allow it to still be clickable.
mainStatusItem.addMouseListener(new MouseAdapter() {
public void mouseClicked(MouseEvent mouseEvent) {
statusAction.actionPerformed(null);
popup.setVisible(false);
}
});
}
}
// Add change message
final JMenuItem changeStatusMenu = new JMenuItem(Res.getString("menuitem.set.status.message"), SparkRes.getImageIcon(SparkRes.BLANK_IMAGE));
popup.addSeparator();
popup.add(changeStatusMenu);
changeStatusMenu.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
CustomMessages.addCustomMessage();
}
});
Action editMessagesAction = new AbstractAction() {
public void actionPerformed(ActionEvent actionEvent) {
CustomMessages.editCustomMessages();
}
};
editMessagesAction.putValue(Action.NAME, Res.getString("menuitem.edit.status.message"));
popup.add(editMessagesAction);
popup.show(statusPanel, 0, statusPanel.getHeight());
}
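// Applies a presence change to the UI on the EDT; an unchanged presence is ignored except to pick up vCard updates.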
public void changeAvailability(final Presence presence) {
if (presence == null) {
return;
}
if ((presence.getMode() == currentPresence.getMode()) && (presence.getType() == currentPresence.getType()) && (presence.getStatus().equals(currentPresence.getStatus()))) {
PacketExtension pe = presence.getExtension("x", "vcard-temp:x:update");
if (pe != null) {
// Update VCard
loadVCard();
}
return;
}
final Runnable changePresenceRunnable = new Runnable() {
public void run() {
currentPresence = presence;
setStatus(presence.getStatus());
StatusItem item = getItemFromPresence(currentPresence);
if (item != null) {
statusPanel.setIcon(item.getIcon());
}
}
};
SwingUtilities.invokeLater(changePresenceRunnable);
}
/**
* Populates the list of selectable presence status items (online, away, DND, etc.).
*/
private void populateDndList() {
final ImageIcon availableIcon = SparkRes.getImageIcon(SparkRes.GREEN_BALL);
final ImageIcon awayIcon = SparkRes.getImageIcon(SparkRes.IM_AWAY);
final ImageIcon dndIcon = SparkRes.getImageIcon(SparkRes.IM_DND);
final ImageIcon phoneIcon = SparkRes.getImageIcon(SparkRes.ON_PHONE_IMAGE);
StatusItem online = new StatusItem(new Presence(Presence.Type.available, "Online", -1, Presence.Mode.available), availableIcon);
StatusItem freeToChat = new StatusItem(new Presence(Presence.Type.available, "Free To Chat", -1, Presence.Mode.chat), SparkRes.getImageIcon(SparkRes.FREE_TO_CHAT_IMAGE));
StatusItem away = new StatusItem(new Presence(Presence.Type.available, "Away", -1, Presence.Mode.away), awayIcon);
StatusItem phone = new StatusItem(new Presence(Presence.Type.available, "On Phone", -1, Presence.Mode.away), phoneIcon);
StatusItem dnd = new StatusItem(new Presence(Presence.Type.available, "Do Not Disturb", -1, Presence.Mode.dnd), dndIcon);
StatusItem extendedAway = new StatusItem(new Presence(Presence.Type.available, "Extended Away", -1, Presence.Mode.xa), awayIcon);
statusList.add(freeToChat);
statusList.add(online);
statusList.add(away);
statusList.add(phone);
statusList.add(extendedAway);
statusList.add(dnd);
// Set default presence icon (Available)
statusPanel.setIcon(availableIcon);
}
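// Finds the status item matching a presence: first by mode, type and status text, then by mode and type alone.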
public StatusItem getItemFromPresence(Presence presence) {
// Handle offline presence
if (presence == null) {
return null;
}
Iterator statusItemIterator = statusList.iterator();
while (statusItemIterator.hasNext()) {
StatusItem item = (StatusItem)statusItemIterator.next();
if (presence.getStatus() != null && item.getPresence().getStatus() != null) {
if ((presence.getMode() == item.getPresence().getMode()) && (presence.getType() == item.getPresence().getType()) &&
(presence.getStatus().equals(item.getPresence().getStatus()))) {
return item;
}
}
}
statusItemIterator = statusList.iterator();
while (statusItemIterator.hasNext()) {
StatusItem item = (StatusItem)statusItemIterator.next();
if ((presence.getMode() == item.getPresence().getMode()) && (presence.getType() == item.getPresence().getType())) {
return item;
}
}
return null;
}
public Collection getStatusList() {
return statusList;
}
public Presence getPresence() {
return currentPresence;
}
public StatusItem getStatusItem(String label) {
Iterator iter = statusList.iterator();
while (iter.hasNext()) {
StatusItem item = (StatusItem)iter.next();
if (item.getText().equals(label)) {
return item;
}
}
return null;
}
public void paintComponent(Graphics g) {
double scaleX = getWidth() / (double)backgroundImage.getWidth(null);
double scaleY = getHeight() / (double)backgroundImage.getHeight(null);
AffineTransform xform = AffineTransform.getScaleInstance(scaleX, scaleY);
((Graphics2D)g).drawImage(backgroundImage, xform, this);
}
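// Loads the user's vCard on a background worker and applies the name and avatar when done.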
public void loadVCard() {
final SwingWorker worker = new SwingWorker() {
public Object construct() {
return SparkManager.getVCardManager().getVCard();
}
public void finished() {
final VCard vCard = (VCard)get();
populateWithVCardInfo(vCard);
}
};
worker.start();
}
private void populateWithVCardInfo(VCard vCard) {
if (vCard.getError() == null) {
String firstName = vCard.getFirstName();
String lastName = vCard.getLastName();
if (ModelUtil.hasLength(firstName) && ModelUtil.hasLength(lastName)) {
setNickname(firstName + " " + lastName);
}
else if (ModelUtil.hasLength(firstName)) {
setNickname(firstName);
}
else {
String nickname = SparkManager.getSessionManager().getUsername();
setNickname(nickname);
}
}
else {
String nickname = SparkManager.getSessionManager().getUsername();
setNickname(nickname);
return;
}
byte[] avatarBytes = null;
try {
avatarBytes = vCard.getAvatar();
}
catch (Exception e) {
Log.error("Cannot retrieve avatar bytes.", e);
}
if (avatarBytes != null) {
try {
ImageIcon avatarIcon = new ImageIcon(avatarBytes);
avatarIcon = VCardManager.scale(avatarIcon);
imageLabel.setIcon(avatarIcon);
imageLabel.setBorder(BorderFactory.createBevelBorder(0, Color.white, Color.lightGray));
imageLabel.invalidate();
imageLabel.validate();
imageLabel.repaint();
}
catch (Exception e) {
// ignore - a failure to render the avatar is not critical
}
}
}
public static Presence copyPresence(Presence presence) {
return new Presence(presence.getType(), presence.getStatus(), presence.getPriority(), presence.getMode());
}
/**
* Returns the nickname component used to display the user's profile name.
*
* @return the label.
*/
public JLabel getNicknameLabel() {
return nicknameLabel;
}
private class StatusPanel extends JPanel {
private JLabel iconLabel;
private JLabel statusLabel;
public StatusPanel() {
super();
setOpaque(false);
iconLabel = new JLabel();
statusLabel = new JLabel();
setLayout(new GridBagLayout());
// Remove padding from icon label
iconLabel.setIconTextGap(0);
add(iconLabel, new GridBagConstraints(0, 0, 1, 1, 0.0, 0.0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
add(statusLabel, new GridBagConstraints(1, 0, 1, 1, 1.0, 0.0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 2, 0, 0), 0, 0));
statusLabel.setFont(new Font("Dialog", Font.PLAIN, 11));
statusLabel.setIcon(SparkRes.getImageIcon(SparkRes.DOWN_ARROW_IMAGE));
statusLabel.setHorizontalTextPosition(JLabel.LEFT);
setOpaque(false);
final Border border = BorderFactory.createEmptyBorder(2, 2, 2, 2);
setBorder(border);
statusLabel.addMouseListener(new MouseAdapter() {
public void mouseReleased(MouseEvent e) {
showPopup(e);
}
public void mouseEntered(MouseEvent e) {
setCursor(GraphicUtils.HAND_CURSOR);
setBorder(BorderFactory.createBevelBorder(0));
}
public void mouseExited(MouseEvent e) {
setCursor(GraphicUtils.DEFAULT_CURSOR);
setBorder(border);
}
public void mousePressed(MouseEvent e) {
setBorder(BorderFactory.createBevelBorder(1));
}
});
}
public void setStatus(String status) {
int length = status.length();
String visualStatus = status;
if (length > 30) {
visualStatus = status.substring(0, 27) + "...";
}
statusLabel.setText(visualStatus);
statusLabel.setToolTipText(status);
}
public void setIcon(Icon icon) {
iconLabel.setIcon(icon);
}
}
public void setBackgroundImage(Image image) {
this.backgroundImage = image;
}
public JPanel getCommandPanel() {
return commandPanel;
}
}
| src/java/org/jivesoftware/spark/ui/status/StatusBar.java | /**
* $Revision: $
* $Date: $
*
* Copyright (C) 2006 Jive Software. All rights reserved.
*
* This software is published under the terms of the GNU Lesser Public License (LGPL),
* a copy of which is included in this distribution.
*/
package org.jivesoftware.spark.ui.status;
import org.jivesoftware.resource.Default;
import org.jivesoftware.resource.Res;
import org.jivesoftware.resource.SparkRes;
import org.jivesoftware.smack.packet.PacketExtension;
import org.jivesoftware.smack.packet.Presence;
import org.jivesoftware.smackx.packet.VCard;
import org.jivesoftware.spark.SparkManager;
import org.jivesoftware.spark.ui.PresenceListener;
import org.jivesoftware.spark.util.GraphicUtils;
import org.jivesoftware.spark.util.ModelUtil;
import org.jivesoftware.spark.util.SwingWorker;
import org.jivesoftware.spark.util.log.Log;
import org.jivesoftware.sparkimpl.profile.VCardManager;
import java.awt.Color;
import java.awt.FlowLayout;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Image;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.geom.AffineTransform;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.BorderFactory;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.JLabel;
import javax.swing.JMenu;
import javax.swing.JMenuItem;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import javax.swing.SwingUtilities;
import javax.swing.border.Border;
public class StatusBar extends JPanel {
private List<StatusItem> statusList = new ArrayList<StatusItem>();
private JLabel imageLabel = new JLabel();
private JLabel nicknameLabel = new JLabel();
private StatusPanel statusPanel = new StatusPanel();
private Image backgroundImage;
private Presence currentPresence;
private JPanel commandPanel;
public StatusBar() {
setLayout(new GridBagLayout());
backgroundImage = Default.getImageIcon(Default.TOP_BOTTOM_BACKGROUND_IMAGE).getImage();
// Initialize the command panel
commandPanel = new JPanel(new FlowLayout(FlowLayout.LEFT));
commandPanel.setOpaque(false);
ImageIcon brandedImage = Default.getImageIcon(Default.BRANDED_IMAGE);
if (brandedImage != null && brandedImage.getIconWidth() > 1) {
final JLabel brandedLabel = new JLabel(brandedImage);
// brandedLabel.setBorder(new PartialLineBorder(Color.LIGHT_GRAY, 1));
add(brandedLabel, new GridBagConstraints(3, 0, 1, 3, 1.0, 0.0, GridBagConstraints.EAST, GridBagConstraints.NONE, new Insets(2, 2, 2, 2), 0, 0));
}
add(imageLabel, new GridBagConstraints(0, 0, 1, 4, 0.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(2, 2, 2, 2), 0, 0));
add(nicknameLabel, new GridBagConstraints(1, 0, 2, 2, 1.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(5, 5, 0, 0), 0, 0));
add(statusPanel, new GridBagConstraints(1, 2, 1, 1, 0.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(0, 2, 0, 0), 0, 0));
// Add Command Panel. We want adding command buttons to be simple.
add(commandPanel, new GridBagConstraints(1, 3, 3, 1, 1.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
nicknameLabel.setToolTipText(SparkManager.getConnection().getUser());
nicknameLabel.setFont(new Font("Dialog", Font.BOLD, 12));
populateDndList();
setStatus("Online");
currentPresence = new Presence(Presence.Type.available, "Online", -1, Presence.Mode.available);
setBorder(BorderFactory.createLineBorder(new Color(197, 213, 230), 1));
SparkManager.getSessionManager().addPresenceListener(new PresenceListener() {
public void presenceChanged(Presence presence) {
changeAvailability(presence);
}
});
// Show profile when the image label is clicked
imageLabel.addMouseListener(new MouseAdapter() {
public void mouseClicked(MouseEvent mouseEvent) {
if (mouseEvent.getClickCount() == 1) {
VCardManager vcardManager = SparkManager.getVCardManager();
vcardManager.showProfile(SparkManager.getWorkspace());
}
}
public void mouseEntered(MouseEvent e) {
imageLabel.setCursor(GraphicUtils.HAND_CURSOR);
}
public void mouseExited(MouseEvent e) {
imageLabel.setCursor(GraphicUtils.DEFAULT_CURSOR);
}
});
}
public void setAvatar(Icon icon) {
imageLabel.setIcon(icon);
invalidate();
validateTree();
}
public void setNickname(String nickname) {
nicknameLabel.setText(nickname);
}
/**
* Sets the current status text in the Status Manager.
*
* @param status the status to set.
*/
public void setStatus(String status) {
statusPanel.setStatus(status);
}
public void showPopup(MouseEvent e) {
final JPopupMenu popup = new JPopupMenu();
List custom = CustomMessages.load();
if (custom == null) {
custom = new ArrayList();
}
// Build menu from StatusList
for(final StatusItem statusItem : statusList){
final Action statusAction = new AbstractAction() {
public void actionPerformed(ActionEvent actionEvent) {
final String text = statusItem.getText();
final StatusItem si = getStatusItem(text);
if (si == null) {
// Custom status
return;
}
SwingWorker worker = new SwingWorker() {
public Object construct() {
SparkManager.getSessionManager().changePresence(si.getPresence());
return "ok";
}
public void finished() {
setStatus(text);
}
};
worker.start();
}
};
statusAction.putValue(Action.NAME, statusItem.getText());
statusAction.putValue(Action.SMALL_ICON, statusItem.getIcon());
// Has Children
boolean hasChildren = false;
Iterator customItemIterator = custom.iterator();
while (customItemIterator.hasNext()) {
final CustomStatusItem cItem = (CustomStatusItem)customItemIterator.next();
String type = cItem.getType();
if (type.equals(statusItem.getText())) {
hasChildren = true;
}
}
if (!hasChildren) {
// Add as Menu Item
popup.add(statusAction);
}
else {
final JMenu mainStatusItem = new JMenu(statusAction);
popup.add(mainStatusItem);
// Add Custom Messages
customItemIterator = custom.iterator();
while (customItemIterator.hasNext()) {
final CustomStatusItem customItem = (CustomStatusItem)customItemIterator.next();
String type = customItem.getType();
if (type.equals(statusItem.getText())) {
// Add Child Menu
Action action = new AbstractAction() {
public void actionPerformed(ActionEvent actionEvent) {
final String text = mainStatusItem.getText();
final StatusItem si = getStatusItem(text);
if (si == null) {
// Custom status
return;
}
SwingWorker worker = new SwingWorker() {
public Object construct() {
Presence oldPresence = si.getPresence();
Presence presence = copyPresence(oldPresence);
presence.setStatus(customItem.getStatus());
presence.setPriority(customItem.getPriority());
SparkManager.getSessionManager().changePresence(presence);
return "ok";
}
public void finished() {
String status = customItem.getType() + " - " + customItem.getStatus();
setStatus(status);
}
};
worker.start();
}
};
action.putValue(Action.NAME, customItem.getStatus());
action.putValue(Action.SMALL_ICON, statusItem.getIcon());
mainStatusItem.add(action);
}
}
// If menu has children, allow it to still be clickable.
mainStatusItem.addMouseListener(new MouseAdapter() {
public void mouseClicked(MouseEvent mouseEvent) {
statusAction.actionPerformed(null);
popup.setVisible(false);
}
});
}
}
// Add change message
final JMenuItem changeStatusMenu = new JMenuItem(Res.getString("menuitem.set.status.message"), SparkRes.getImageIcon(SparkRes.BLANK_IMAGE));
popup.addSeparator();
popup.add(changeStatusMenu);
changeStatusMenu.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
CustomMessages.addCustomMessage();
}
});
Action editMessagesAction = new AbstractAction() {
public void actionPerformed(ActionEvent actionEvent) {
CustomMessages.editCustomMessages();
}
};
editMessagesAction.putValue(Action.NAME, Res.getString("menuitem.edit.status.message"));
popup.add(editMessagesAction);
popup.show(statusPanel, 0, statusPanel.getHeight());
}
public void changeAvailability(final Presence presence) {
if (presence == null) {
return;
}
if ((presence.getMode() == currentPresence.getMode()) && (presence.getType() == currentPresence.getType()) && (presence.getStatus().equals(currentPresence.getStatus()))) {
PacketExtension pe = presence.getExtension("x", "vcard-temp:x:update");
if (pe != null) {
// Update VCard
loadVCard();
}
return;
}
final Runnable changePresenceRunnable = new Runnable() {
public void run() {
currentPresence = presence;
setStatus(presence.getStatus());
StatusItem item = getItemFromPresence(currentPresence);
if (item != null) {
statusPanel.setIcon(item.getIcon());
}
}
};
SwingUtilities.invokeLater(changePresenceRunnable);
}
/**
* Populates the list of selectable presence status items (online, away, DND, etc.).
*/
private void populateDndList() {
final ImageIcon availableIcon = SparkRes.getImageIcon(SparkRes.GREEN_BALL);
final ImageIcon awayIcon = SparkRes.getImageIcon(SparkRes.IM_AWAY);
final ImageIcon dndIcon = SparkRes.getImageIcon(SparkRes.IM_DND);
final ImageIcon phoneIcon = SparkRes.getImageIcon(SparkRes.ON_PHONE_IMAGE);
StatusItem online = new StatusItem(new Presence(Presence.Type.available, "Online", -1, Presence.Mode.available), availableIcon);
StatusItem freeToChat = new StatusItem(new Presence(Presence.Type.available, "Free To Chat", -1, Presence.Mode.chat), SparkRes.getImageIcon(SparkRes.FREE_TO_CHAT_IMAGE));
StatusItem away = new StatusItem(new Presence(Presence.Type.available, "Away", -1, Presence.Mode.away), awayIcon);
StatusItem phone = new StatusItem(new Presence(Presence.Type.available, "On Phone", -1, Presence.Mode.away), phoneIcon);
StatusItem dnd = new StatusItem(new Presence(Presence.Type.available, "Do Not Disturb", -1, Presence.Mode.dnd), dndIcon);
StatusItem extendedAway = new StatusItem(new Presence(Presence.Type.available, "Extended Away", -1, Presence.Mode.xa), awayIcon);
statusList.add(freeToChat);
statusList.add(online);
statusList.add(away);
statusList.add(phone);
statusList.add(extendedAway);
statusList.add(dnd);
// Set default presence icon (Available)
statusPanel.setIcon(availableIcon);
}
public StatusItem getItemFromPresence(Presence presence) {
// Handle offline presence
if (presence == null) {
return null;
}
Iterator statusItemIterator = statusList.iterator();
while (statusItemIterator.hasNext()) {
StatusItem item = (StatusItem)statusItemIterator.next();
if (presence.getStatus() != null && item.getPresence().getStatus() != null) {
if ((presence.getMode() == item.getPresence().getMode()) && (presence.getType() == item.getPresence().getType()) &&
(presence.getStatus().equals(item.getPresence().getStatus()))) {
return item;
}
}
}
statusItemIterator = statusList.iterator();
while (statusItemIterator.hasNext()) {
StatusItem item = (StatusItem)statusItemIterator.next();
if ((presence.getMode() == item.getPresence().getMode()) && (presence.getType() == item.getPresence().getType())) {
return item;
}
}
return null;
}
public Collection getStatusList() {
return statusList;
}
public Presence getPresence() {
return currentPresence;
}
public StatusItem getStatusItem(String label) {
Iterator iter = statusList.iterator();
while (iter.hasNext()) {
StatusItem item = (StatusItem)iter.next();
if (item.getText().equals(label)) {
return item;
}
}
return null;
}
public void paintComponent(Graphics g) {
double scaleX = getWidth() / (double)backgroundImage.getWidth(null);
double scaleY = getHeight() / (double)backgroundImage.getHeight(null);
AffineTransform xform = AffineTransform.getScaleInstance(scaleX, scaleY);
((Graphics2D)g).drawImage(backgroundImage, xform, this);
}
public void loadVCard() {
final SwingWorker worker = new SwingWorker() {
public Object construct() {
return SparkManager.getVCardManager().getVCard();
}
public void finished() {
final VCard vCard = (VCard)get();
populateWithVCardInfo(vCard);
}
};
worker.start();
}
private void populateWithVCardInfo(VCard vCard) {
if (vCard.getError() == null) {
String firstName = vCard.getFirstName();
String lastName = vCard.getLastName();
if (ModelUtil.hasLength(firstName) && ModelUtil.hasLength(lastName)) {
setNickname(firstName + " " + lastName);
}
else if (ModelUtil.hasLength(firstName)) {
setNickname(firstName);
}
else {
String nickname = SparkManager.getSessionManager().getUsername();
setNickname(nickname);
}
}
else {
String nickname = SparkManager.getSessionManager().getUsername();
setNickname(nickname);
return;
}
byte[] avatarBytes = null;
try {
avatarBytes = vCard.getAvatar();
}
catch (Exception e) {
Log.error("Cannot retrieve avatar bytes.", e);
}
if (avatarBytes != null) {
try {
ImageIcon avatarIcon = new ImageIcon(avatarBytes);
avatarIcon = VCardManager.scale(avatarIcon);
imageLabel.setIcon(avatarIcon);
imageLabel.setBorder(BorderFactory.createBevelBorder(0, Color.white, Color.lightGray));
imageLabel.invalidate();
imageLabel.validate();
imageLabel.repaint();
}
catch (Exception e) {
// ignore - a failure to render the avatar is not critical
}
}
}
public static Presence copyPresence(Presence presence) {
return new Presence(presence.getType(), presence.getStatus(), presence.getPriority(), presence.getMode());
}
/**
* Returns the nickname component used to display the user's profile name.
*
* @return the label.
*/
public JLabel getNicknameLabel() {
return nicknameLabel;
}
private class StatusPanel extends JPanel {
private JLabel iconLabel;
private JLabel statusLabel;
public StatusPanel() {
super();
setOpaque(false);
iconLabel = new JLabel();
statusLabel = new JLabel();
setLayout(new GridBagLayout());
// Remove padding from icon label
iconLabel.setIconTextGap(0);
add(iconLabel, new GridBagConstraints(0, 0, 1, 1, 0.0, 0.0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
add(statusLabel, new GridBagConstraints(1, 0, 1, 1, 1.0, 0.0, GridBagConstraints.WEST, GridBagConstraints.NONE, new Insets(0, 2, 0, 0), 0, 0));
statusLabel.setFont(new Font("Dialog", Font.PLAIN, 11));
statusLabel.setIcon(SparkRes.getImageIcon(SparkRes.DOWN_ARROW_IMAGE));
statusLabel.setHorizontalTextPosition(JLabel.LEFT);
setOpaque(false);
final Border border = BorderFactory.createEmptyBorder(2, 2, 2, 2);
setBorder(border);
statusLabel.addMouseListener(new MouseAdapter() {
public void mouseReleased(MouseEvent e) {
showPopup(e);
}
public void mouseEntered(MouseEvent e) {
setCursor(GraphicUtils.HAND_CURSOR);
setBorder(BorderFactory.createBevelBorder(0));
}
public void mouseExited(MouseEvent e) {
setCursor(GraphicUtils.DEFAULT_CURSOR);
setBorder(border);
}
public void mousePressed(MouseEvent e) {
setBorder(BorderFactory.createBevelBorder(1));
}
});
}
public void setStatus(String status) {
int length = status.length();
String visualStatus = status;
if (length > 30) {
visualStatus = status.substring(0, 27) + "...";
}
statusLabel.setText(visualStatus);
statusLabel.setToolTipText(status);
}
public void setIcon(Icon icon) {
iconLabel.setIcon(icon);
}
}
public void setBackgroundImage(Image image) {
this.backgroundImage = image;
}
public JPanel getCommandPanel() {
return commandPanel;
}
}
| Minor fixes.
git-svn-id: f13e20fb8540f76a08799c0051229138c0279aa7@5683 b35dd754-fafc-0310-a699-88a17e54d16e
| src/java/org/jivesoftware/spark/ui/status/StatusBar.java | Minor fixes. | <ide><path>src/java/org/jivesoftware/spark/ui/status/StatusBar.java
<ide> }
<ide>
<ide> // Build menu from StatusList
<del> for(final StatusItem statusItem : statusList){
<add> for (final StatusItem statusItem : statusList) {
<ide> final Action statusAction = new AbstractAction() {
<ide> public void actionPerformed(ActionEvent actionEvent) {
<ide> final String text = statusItem.getText();
<ide> final StatusItem si = getStatusItem(text);
<ide> if (si == null) {
<ide> // Custom status
<add> Log.error("Unable to find status item for status - " + text);
<ide> return;
<ide> }
<ide>
<ide> final StatusItem si = getStatusItem(text);
<ide> if (si == null) {
<ide> // Custom status
<add> Log.error("Unable to find status item for status - " + text);
<ide> return;
<ide> }
<ide> |
|
JavaScript | mit | cf7b78a83b064484f7b6276c9debba41a2a42cac | 0 | chinasb/meteor,PatrickMcGuinness/meteor,deanius/meteor,benjamn/meteor,Jeremy017/meteor,mauricionr/meteor,jeblister/meteor,zdd910/meteor,AlexR1712/meteor,GrimDerp/meteor,rabbyalone/meteor,alexbeletsky/meteor,AlexR1712/meteor,steedos/meteor,devgrok/meteor,shmiko/meteor,aramk/meteor,fashionsun/meteor,henrypan/meteor,Eynaliyev/meteor,zdd910/meteor,meteor-velocity/meteor,rabbyalone/meteor,EduShareOntario/meteor,aramk/meteor,michielvanoeffelen/meteor,servel333/meteor,whip112/meteor,jeblister/meteor,somallg/meteor,AlexR1712/meteor,jeblister/meteor,yanisIk/meteor,yonglehou/meteor,dboyliao/meteor,Theviajerock/meteor,bhargav175/meteor,Ken-Liu/meteor,codedogfish/meteor,stevenliuit/meteor,jirengu/meteor,baysao/meteor,katopz/meteor,esteedqueen/meteor,pandeysoni/meteor,Prithvi-A/meteor,lassombra/meteor,alphanso/meteor,saisai/meteor,chiefninew/meteor,Paulyoufu/meteor-1,jrudio/meteor,mjmasn/meteor,vacjaliu/meteor,DAB0mB/meteor,yinhe007/meteor,codingang/meteor,saisai/meteor,katopz/meteor,shadedprofit/meteor,JesseQin/meteor,juansgaitan/meteor,Ken-Liu/meteor,dboyliao/meteor,Quicksteve/meteor,henrypan/meteor,msavin/meteor,colinligertwood/meteor,arunoda/meteor,framewr/meteor,lieuwex/meteor,Puena/meteor,SeanOceanHu/meteor,evilemon/meteor,l0rd0fwar/meteor,shrop/meteor,papimomi/meteor,paul-barry-kenzan/meteor,jagi/meteor,HugoRLopes/meteor,yalexx/meteor,rabbyalone/meteor,dfischer/meteor,servel333/meteor,Jonekee/meteor,sitexa/meteor,lassombra/meteor,daltonrenaldo/meteor,h200863057/meteor,meonkeys/meteor,pandeysoni/meteor,calvintychan/meteor,akintoey/meteor,guazipi/meteor,namho102/meteor,AnthonyAstige/meteor,kengchau/meteor,sdeveloper/meteor,namho102/meteor,lpinto93/meteor,shmiko/meteor,paul-barry-kenzan/meteor,hristaki/meteor,colinligertwood/meteor,lorensr/meteor,rozzzly/meteor,DAB0mB/meteor,juansgaitan/meteor,newswim/meteor,TechplexEngineer/meteor,nuvipannu/meteor,baysao/meteor,shmiko/meteor,saisai/meteor,daslicht/meteor,karlito40/meteor,baiyunping333/meteor,benjamn/meteor,jirengu/meteor,devgrok/meteor,benjamn/meteor,esteedqueen/meteor,meteor-velocity/meteor,planet-training/meteor,h200863057/meteor,bhargav175/meteor,eluck/meteor,dandv/meteor,joannekoong/meteor,juansgaitan/meteor,rozzzly/meteor,karlito40/meteor,jagi/meteor,steedos/meteor,ljack/meteor,mauricionr/meteor,mubassirhayat/meteor,luohuazju/meteor,somallg/meteor,SeanOceanHu/meteor,jrudio/meteor,tdamsma/meteor,meonkeys/meteor,dandv/meteor,luohuazju/meteor,msavin/meteor,baiyunping333/meteor,cbonami/meteor,emmerge/meteor,yyx990803/meteor,eluck/meteor,elkingtonmcb/meteor,brdtrpp/meteor,lawrenceAIO/meteor,4commerce-technologies-AG/meteor,shrop/meteor,mirstan/meteor,justintung/meteor,shadedprofit/meteor,rozzzly/meteor,benstoltz/meteor,benjamn/meteor,eluck/meteor,allanalexandre/meteor,ashwathgovind/meteor,skarekrow/meteor,lpinto93/meteor,paul-barry-kenzan/meteor,paul-barry-kenzan/meteor,codedogfish/meteor,newswim/meteor,katopz/meteor,chasertech/meteor,IveWong/meteor,AnthonyAstige/meteor,dandv/meteor,chinasb/meteor,sclausen/meteor,katopz/meteor,colinligertwood/meteor,johnthepink/meteor,skarekrow/meteor,yonglehou/meteor,Hansoft/meteor,kidaa/meteor,emmerge/meteor,planet-training/meteor,chengxiaole/meteor,modulexcite/meteor,steedos/meteor,cherbst/meteor,judsonbsilva/meteor,allanalexandre/meteor,iman-mafi/meteor,planet-training/meteor,luohuazju/meteor,imanmafi/meteor,cog-64/meteor,tdamsma/meteor,youprofit/meteor,chinasb/meteor,akintoey/meteor,ashwathgovind/meteor,codedogfish/meteor,DCKT/meteor,dfischer/me
teor,joannekoong/meteor,qscripter/meteor,yanisIk/meteor,sunny-g/meteor,framewr/meteor,Paulyoufu/meteor-1,williambr/meteor,Prithvi-A/meteor,yinhe007/meteor,chengxiaole/meteor,dfischer/meteor,skarekrow/meteor,kidaa/meteor,chinasb/meteor,ndarilek/meteor,arunoda/meteor,codedogfish/meteor,mirstan/meteor,juansgaitan/meteor,D1no/meteor,Prithvi-A/meteor,chiefninew/meteor,TribeMedia/meteor,Jonekee/meteor,queso/meteor,akintoey/meteor,PatrickMcGuinness/meteor,emmerge/meteor,esteedqueen/meteor,evilemon/meteor,hristaki/meteor,meonkeys/meteor,Jonekee/meteor,mirstan/meteor,daslicht/meteor,Theviajerock/meteor,rozzzly/meteor,elkingtonmcb/meteor,Hansoft/meteor,EduShareOntario/meteor,somallg/meteor,modulexcite/meteor,yiliaofan/meteor,neotim/meteor,youprofit/meteor,ndarilek/meteor,chiefninew/meteor,4commerce-technologies-AG/meteor,emmerge/meteor,Profab/meteor,whip112/meteor,modulexcite/meteor,aldeed/meteor,yonas/meteor-freebsd,SeanOceanHu/meteor,D1no/meteor,Profab/meteor,mubassirhayat/meteor,johnthepink/meteor,yanisIk/meteor,udhayam/meteor,dboyliao/meteor,zdd910/meteor,namho102/meteor,devgrok/meteor,jeblister/meteor,sunny-g/meteor,benjamn/meteor,udhayam/meteor,lpinto93/meteor,benstoltz/meteor,baysao/meteor,calvintychan/meteor,Puena/meteor,iman-mafi/meteor,TechplexEngineer/meteor,rabbyalone/meteor,AlexR1712/meteor,henrypan/meteor,kencheung/meteor,jg3526/meteor,tdamsma/meteor,michielvanoeffelen/meteor,Profab/meteor,chasertech/meteor,aleclarson/meteor,allanalexandre/meteor,kencheung/meteor,rozzzly/meteor,akintoey/meteor,elkingtonmcb/meteor,chiefninew/meteor,Jeremy017/meteor,stevenliuit/meteor,DAB0mB/meteor,TechplexEngineer/meteor,framewr/meteor,SeanOceanHu/meteor,queso/meteor,AnjirHossain/meteor,karlito40/meteor,jagi/meteor,papimomi/meteor,servel333/meteor,jg3526/meteor,rozzzly/meteor,ndarilek/meteor,rabbyalone/meteor,yonas/meteor-freebsd,h200863057/meteor,michielvanoeffelen/meteor,DAB0mB/meteor,sunny-g/meteor,queso/meteor,akintoey/meteor,ashwathgovind/meteor,henrypan/meteor,cog-64/meteor,qscripter/meteor,deanius/meteor,msavin/meteor,michielvanoeffelen/meteor,IveWong/meteor,lpinto93/meteor,msavin/meteor,papimomi/meteor,D1no/meteor,cherbst/meteor,daltonrenaldo/meteor,yyx990803/meteor,DCKT/meteor,lieuwex/meteor,ndarilek/meteor,yyx990803/meteor,devgrok/meteor,ashwathgovind/meteor,Prithvi-A/meteor,chengxiaole/meteor,hristaki/meteor,mubassirhayat/meteor,alexbeletsky/meteor,evilemon/meteor,chasertech/meteor,dev-bobsong/meteor,chiefninew/meteor,modulexcite/meteor,joannekoong/meteor,dboyliao/meteor,calvintychan/meteor,HugoRLopes/meteor,steedos/meteor,Urigo/meteor,devgrok/meteor,aleclarson/meteor,yonas/meteor-freebsd,meteor-velocity/meteor,justintung/meteor,Hansoft/meteor,pjump/meteor,shrop/meteor,neotim/meteor,Eynaliyev/meteor,lieuwex/meteor,Jonekee/meteor,yanisIk/meteor,Theviajerock/meteor,cherbst/meteor,sclausen/meteor,codedogfish/meteor,judsonbsilva/meteor,chmac/meteor,yalexx/meteor,judsonbsilva/meteor,queso/meteor,aleclarson/meteor,daslicht/meteor,servel333/meteor,cherbst/meteor,aldeed/meteor,msavin/meteor,evilemon/meteor,Jonekee/meteor,pandeysoni/meteor,DAB0mB/meteor,luohuazju/meteor,bhargav175/meteor,saisai/meteor,namho102/meteor,kengchau/meteor,akintoey/meteor,alphanso/meteor,jg3526/meteor,HugoRLopes/meteor,lieuwex/meteor,yyx990803/meteor,fashionsun/meteor,allanalexandre/meteor,jrudio/meteor,mubassirhayat/meteor,dfischer/meteor,modulexcite/meteor,kencheung/meteor,devgrok/meteor,oceanzou123/meteor,papimomi/meteor,PatrickMcGuinness/meteor,D1no/meteor,sclausen/meteor,karlito40/meteor,GrimDerp/meteor,kidaa/meteor,iman
-mafi/meteor,saisai/meteor,iman-mafi/meteor,brdtrpp/meteor,pandeysoni/meteor,baysao/meteor,Eynaliyev/meteor,papimomi/meteor,arunoda/meteor,udhayam/meteor,chasertech/meteor,hristaki/meteor,yonas/meteor-freebsd,zdd910/meteor,imanmafi/meteor,Puena/meteor,paul-barry-kenzan/meteor,Ken-Liu/meteor,codingang/meteor,mirstan/meteor,whip112/meteor,lawrenceAIO/meteor,lpinto93/meteor,calvintychan/meteor,yanisIk/meteor,kengchau/meteor,jg3526/meteor,AnthonyAstige/meteor,codingang/meteor,iman-mafi/meteor,justintung/meteor,imanmafi/meteor,modulexcite/meteor,chasertech/meteor,paul-barry-kenzan/meteor,juansgaitan/meteor,IveWong/meteor,tdamsma/meteor,ndarilek/meteor,brdtrpp/meteor,alphanso/meteor,chengxiaole/meteor,LWHTarena/meteor,DAB0mB/meteor,alexbeletsky/meteor,ashwathgovind/meteor,shmiko/meteor,kengchau/meteor,sdeveloper/meteor,vjau/meteor,TribeMedia/meteor,D1no/meteor,ljack/meteor,aramk/meteor,queso/meteor,vacjaliu/meteor,jrudio/meteor,shmiko/meteor,jdivy/meteor,yinhe007/meteor,youprofit/meteor,youprofit/meteor,sitexa/meteor,somallg/meteor,Quicksteve/meteor,yinhe007/meteor,lassombra/meteor,dandv/meteor,cog-64/meteor,Quicksteve/meteor,baiyunping333/meteor,chengxiaole/meteor,meonkeys/meteor,chasertech/meteor,vacjaliu/meteor,D1no/meteor,DCKT/meteor,DAB0mB/meteor,TechplexEngineer/meteor,lorensr/meteor,johnthepink/meteor,D1no/meteor,jrudio/meteor,aldeed/meteor,shadedprofit/meteor,Urigo/meteor,TechplexEngineer/meteor,aldeed/meteor,aramk/meteor,codingang/meteor,mirstan/meteor,dev-bobsong/meteor,michielvanoeffelen/meteor,shrop/meteor,vacjaliu/meteor,whip112/meteor,queso/meteor,Eynaliyev/meteor,chmac/meteor,codedogfish/meteor,kengchau/meteor,cbonami/meteor,joannekoong/meteor,ljack/meteor,Prithvi-A/meteor,sclausen/meteor,chmac/meteor,steedos/meteor,AnjirHossain/meteor,EduShareOntario/meteor,yalexx/meteor,codingang/meteor,DCKT/meteor,4commerce-technologies-AG/meteor,JesseQin/meteor,PatrickMcGuinness/meteor,brettle/meteor,ndarilek/meteor,judsonbsilva/meteor,jagi/meteor,aramk/meteor,mjmasn/meteor,AlexR1712/meteor,yyx990803/meteor,mauricionr/meteor,joannekoong/meteor,Puena/meteor,chiefninew/meteor,pjump/meteor,kengchau/meteor,kidaa/meteor,benstoltz/meteor,jdivy/meteor,yiliaofan/meteor,jirengu/meteor,LWHTarena/meteor,aramk/meteor,framewr/meteor,deanius/meteor,brettle/meteor,msavin/meteor,rozzzly/meteor,EduShareOntario/meteor,lieuwex/meteor,pjump/meteor,namho102/meteor,nuvipannu/meteor,johnthepink/meteor,lpinto93/meteor,DCKT/meteor,ericterpstra/meteor,h200863057/meteor,benstoltz/meteor,dboyliao/meteor,arunoda/meteor,baiyunping333/meteor,l0rd0fwar/meteor,williambr/meteor,papimomi/meteor,jagi/meteor,mjmasn/meteor,pjump/meteor,luohuazju/meteor,JesseQin/meteor,yalexx/meteor,mauricionr/meteor,HugoRLopes/meteor,shadedprofit/meteor,msavin/meteor,h200863057/meteor,planet-training/meteor,skarekrow/meteor,chmac/meteor,codingang/meteor,imanmafi/meteor,baysao/meteor,daltonrenaldo/meteor,Hansoft/meteor,alexbeletsky/meteor,jdivy/meteor,sdeveloper/meteor,sdeveloper/meteor,AlexR1712/meteor,yinhe007/meteor,evilemon/meteor,Ken-Liu/meteor,EduShareOntario/meteor,Eynaliyev/meteor,somallg/meteor,esteedqueen/meteor,brdtrpp/meteor,iman-mafi/meteor,yyx990803/meteor,Jonekee/meteor,fashionsun/meteor,SeanOceanHu/meteor,esteedqueen/meteor,rabbyalone/meteor,jg3526/meteor,dandv/meteor,Quicksteve/meteor,Theviajerock/meteor,karlito40/meteor,dfischer/meteor,wmkcc/meteor,judsonbsilva/meteor,newswim/meteor,JesseQin/meteor,meteor-velocity/meteor,shmiko/meteor,qscripter/meteor,pjump/meteor,Ken-Liu/meteor,Urigo/meteor,jagi/meteor,servel333/meteor,wmkcc/meteo
r,alphanso/meteor,Profab/meteor,jrudio/meteor,GrimDerp/meteor,paul-barry-kenzan/meteor,Hansoft/meteor,imanmafi/meteor,fashionsun/meteor,daltonrenaldo/meteor,arunoda/meteor,Jeremy017/meteor,Urigo/meteor,brdtrpp/meteor,GrimDerp/meteor,mjmasn/meteor,ndarilek/meteor,JesseQin/meteor,dev-bobsong/meteor,cog-64/meteor,Hansoft/meteor,Eynaliyev/meteor,Jeremy017/meteor,udhayam/meteor,Quicksteve/meteor,allanalexandre/meteor,kidaa/meteor,mirstan/meteor,vjau/meteor,guazipi/meteor,newswim/meteor,eluck/meteor,jeblister/meteor,JesseQin/meteor,lawrenceAIO/meteor,baysao/meteor,Ken-Liu/meteor,PatrickMcGuinness/meteor,mirstan/meteor,IveWong/meteor,eluck/meteor,alexbeletsky/meteor,JesseQin/meteor,brettle/meteor,daslicht/meteor,lorensr/meteor,sunny-g/meteor,yanisIk/meteor,sunny-g/meteor,guazipi/meteor,DCKT/meteor,Jeremy017/meteor,chengxiaole/meteor,qscripter/meteor,michielvanoeffelen/meteor,vjau/meteor,Theviajerock/meteor,oceanzou123/meteor,arunoda/meteor,evilemon/meteor,lawrenceAIO/meteor,Paulyoufu/meteor-1,ashwathgovind/meteor,mauricionr/meteor,vjau/meteor,dev-bobsong/meteor,yiliaofan/meteor,daltonrenaldo/meteor,tdamsma/meteor,framewr/meteor,cbonami/meteor,yanisIk/meteor,nuvipannu/meteor,vjau/meteor,shadedprofit/meteor,jirengu/meteor,meteor-velocity/meteor,zdd910/meteor,Eynaliyev/meteor,jenalgit/meteor,joannekoong/meteor,emmerge/meteor,l0rd0fwar/meteor,oceanzou123/meteor,Puena/meteor,jdivy/meteor,Profab/meteor,namho102/meteor,hristaki/meteor,Hansoft/meteor,lorensr/meteor,katopz/meteor,saisai/meteor,calvintychan/meteor,chmac/meteor,yonglehou/meteor,jenalgit/meteor,HugoRLopes/meteor,johnthepink/meteor,brettle/meteor,mjmasn/meteor,AnthonyAstige/meteor,meonkeys/meteor,ndarilek/meteor,lassombra/meteor,bhargav175/meteor,daltonrenaldo/meteor,colinligertwood/meteor,stevenliuit/meteor,D1no/meteor,Jeremy017/meteor,brdtrpp/meteor,lieuwex/meteor,shadedprofit/meteor,brettle/meteor,jenalgit/meteor,qscripter/meteor,ljack/meteor,baysao/meteor,TribeMedia/meteor,Paulyoufu/meteor-1,dboyliao/meteor,sdeveloper/meteor,lassombra/meteor,udhayam/meteor,l0rd0fwar/meteor,4commerce-technologies-AG/meteor,dfischer/meteor,GrimDerp/meteor,Jeremy017/meteor,kidaa/meteor,Quicksteve/meteor,dev-bobsong/meteor,daslicht/meteor,oceanzou123/meteor,jirengu/meteor,tdamsma/meteor,lassombra/meteor,jenalgit/meteor,lassombra/meteor,deanius/meteor,papimomi/meteor,AnjirHossain/meteor,williambr/meteor,jirengu/meteor,hristaki/meteor,cog-64/meteor,servel333/meteor,jg3526/meteor,ericterpstra/meteor,GrimDerp/meteor,stevenliuit/meteor,daslicht/meteor,kencheung/meteor,williambr/meteor,yinhe007/meteor,chengxiaole/meteor,juansgaitan/meteor,pandeysoni/meteor,elkingtonmcb/meteor,EduShareOntario/meteor,alphanso/meteor,sdeveloper/meteor,yiliaofan/meteor,sitexa/meteor,lorensr/meteor,aldeed/meteor,deanius/meteor,Eynaliyev/meteor,codedogfish/meteor,neotim/meteor,chinasb/meteor,joannekoong/meteor,AnjirHossain/meteor,sclausen/meteor,IveWong/meteor,neotim/meteor,udhayam/meteor,cherbst/meteor,katopz/meteor,karlito40/meteor,whip112/meteor,yanisIk/meteor,pjump/meteor,wmkcc/meteor,wmkcc/meteor,guazipi/meteor,Puena/meteor,AnthonyAstige/meteor,Ken-Liu/meteor,dandv/meteor,oceanzou123/meteor,meteor-velocity/meteor,lawrenceAIO/meteor,h200863057/meteor,benjamn/meteor,IveWong/meteor,dev-bobsong/meteor,jenalgit/meteor,michielvanoeffelen/meteor,vjau/meteor,HugoRLopes/meteor,youprofit/meteor,planet-training/meteor,chinasb/meteor,mubassirhayat/meteor,meonkeys/meteor,h200863057/meteor,servel333/meteor,lawrenceAIO/meteor,arunoda/meteor,SeanOceanHu/meteor,baiyunping333/meteor,chmac/meteor,jdivy
/meteor,esteedqueen/meteor,steedos/meteor,modulexcite/meteor,eluck/meteor,aramk/meteor,sunny-g/meteor,mauricionr/meteor,ericterpstra/meteor,stevenliuit/meteor,GrimDerp/meteor,codingang/meteor,ljack/meteor,alexbeletsky/meteor,luohuazju/meteor,Urigo/meteor,brettle/meteor,mjmasn/meteor,nuvipannu/meteor,AnjirHossain/meteor,kencheung/meteor,Paulyoufu/meteor-1,zdd910/meteor,jdivy/meteor,justintung/meteor,AnthonyAstige/meteor,Theviajerock/meteor,yonas/meteor-freebsd,shmiko/meteor,4commerce-technologies-AG/meteor,mjmasn/meteor,sitexa/meteor,daltonrenaldo/meteor,chasertech/meteor,Quicksteve/meteor,whip112/meteor,Profab/meteor,dboyliao/meteor,SeanOceanHu/meteor,pjump/meteor,oceanzou123/meteor,planet-training/meteor,Puena/meteor,4commerce-technologies-AG/meteor,johnthepink/meteor,jeblister/meteor,yonglehou/meteor,LWHTarena/meteor,tdamsma/meteor,daslicht/meteor,karlito40/meteor,l0rd0fwar/meteor,dboyliao/meteor,ericterpstra/meteor,brdtrpp/meteor,skarekrow/meteor,yonas/meteor-freebsd,sunny-g/meteor,yiliaofan/meteor,ericterpstra/meteor,planet-training/meteor,sdeveloper/meteor,deanius/meteor,alexbeletsky/meteor,cbonami/meteor,yiliaofan/meteor,Prithvi-A/meteor,qscripter/meteor,AnjirHossain/meteor,wmkcc/meteor,steedos/meteor,nuvipannu/meteor,whip112/meteor,eluck/meteor,l0rd0fwar/meteor,henrypan/meteor,somallg/meteor,esteedqueen/meteor,vacjaliu/meteor,evilemon/meteor,skarekrow/meteor,jeblister/meteor,bhargav175/meteor,sclausen/meteor,devgrok/meteor,kengchau/meteor,newswim/meteor,AnjirHossain/meteor,neotim/meteor,TribeMedia/meteor,elkingtonmcb/meteor,IveWong/meteor,baiyunping333/meteor,chiefninew/meteor,cbonami/meteor,yalexx/meteor,AlexR1712/meteor,yalexx/meteor,williambr/meteor,yyx990803/meteor,ericterpstra/meteor,TribeMedia/meteor,LWHTarena/meteor,judsonbsilva/meteor,l0rd0fwar/meteor,daltonrenaldo/meteor,iman-mafi/meteor,stevenliuit/meteor,shrop/meteor,jirengu/meteor,justintung/meteor,youprofit/meteor,yalexx/meteor,saisai/meteor,vacjaliu/meteor,neotim/meteor,luohuazju/meteor,karlito40/meteor,qscripter/meteor,oceanzou123/meteor,PatrickMcGuinness/meteor,mubassirhayat/meteor,wmkcc/meteor,benstoltz/meteor,justintung/meteor,pandeysoni/meteor,SeanOceanHu/meteor,williambr/meteor,jdivy/meteor,williambr/meteor,nuvipannu/meteor,yonglehou/meteor,judsonbsilva/meteor,justintung/meteor,alphanso/meteor,imanmafi/meteor,Paulyoufu/meteor-1,servel333/meteor,somallg/meteor,sunny-g/meteor,Paulyoufu/meteor-1,yonas/meteor-freebsd,Urigo/meteor,yinhe007/meteor,imanmafi/meteor,chiefninew/meteor,katopz/meteor,brdtrpp/meteor,somallg/meteor,DCKT/meteor,LWHTarena/meteor,chinasb/meteor,pandeysoni/meteor,colinligertwood/meteor,mubassirhayat/meteor,guazipi/meteor,shadedprofit/meteor,henrypan/meteor,dfischer/meteor,ashwathgovind/meteor,colinligertwood/meteor,queso/meteor,mubassirhayat/meteor,alexbeletsky/meteor,fashionsun/meteor,cbonami/meteor,skarekrow/meteor,newswim/meteor,deanius/meteor,LWHTarena/meteor,ljack/meteor,wmkcc/meteor,Jonekee/meteor,alphanso/meteor,jagi/meteor,sitexa/meteor,lawrenceAIO/meteor,framewr/meteor,TribeMedia/meteor,jenalgit/meteor,ljack/meteor,hristaki/meteor,cherbst/meteor,namho102/meteor,elkingtonmcb/meteor,yonglehou/meteor,HugoRLopes/meteor,stevenliuit/meteor,bhargav175/meteor,colinligertwood/meteor,benstoltz/meteor,cbonami/meteor,emmerge/meteor,kencheung/meteor,emmerge/meteor,guazipi/meteor,allanalexandre/meteor,brettle/meteor,eluck/meteor,calvintychan/meteor,zdd910/meteor,EduShareOntario/meteor,nuvipannu/meteor,TribeMedia/meteor,shrop/meteor,LWHTarena/meteor,shrop/meteor,lorensr/meteor,calvintychan/meteor,Theviaj
erock/meteor,henrypan/meteor,kidaa/meteor,rabbyalone/meteor,benstoltz/meteor,framewr/meteor,TechplexEngineer/meteor,yonglehou/meteor,cherbst/meteor,juansgaitan/meteor,lieuwex/meteor,dandv/meteor,meteor-velocity/meteor,johnthepink/meteor,benjamn/meteor,guazipi/meteor,tdamsma/meteor,sclausen/meteor,newswim/meteor,aldeed/meteor,bhargav175/meteor,sitexa/meteor,baiyunping333/meteor,youprofit/meteor,meonkeys/meteor,PatrickMcGuinness/meteor,fashionsun/meteor,AnthonyAstige/meteor,jenalgit/meteor,ljack/meteor,elkingtonmcb/meteor,Prithvi-A/meteor,akintoey/meteor,fashionsun/meteor,kencheung/meteor,cog-64/meteor,cog-64/meteor,allanalexandre/meteor,allanalexandre/meteor,TechplexEngineer/meteor,sitexa/meteor,AnthonyAstige/meteor,udhayam/meteor,HugoRLopes/meteor,aldeed/meteor,Profab/meteor,lpinto93/meteor,jg3526/meteor,Urigo/meteor,vacjaliu/meteor,dev-bobsong/meteor,mauricionr/meteor,planet-training/meteor,neotim/meteor,vjau/meteor,4commerce-technologies-AG/meteor,yiliaofan/meteor,chmac/meteor,ericterpstra/meteor,lorensr/meteor | // This file is used to access the "warehouse" of pre-0.9.0 releases. This code
// is now legacy, but we keep it around so that you can still use the same
// `meteor` entry point to run pre-0.9.0 and post-0.9.0 releases, for now. All
// it knows how to do is download old releases and explain to main.js how to
// exec them.
//
// Because of this, we do have to be careful that the files used by this code
// and the files used by tropohouse.js (the modern version of the warehouse)
// don't overlap. tropohouse does not use tools or releases directories, and
// while they both have packages directories with similar structures, the
// version names should not overlap: warehouse versions are SHAs and tropohouse
// versions are semvers. Additionally, while they do both use the 'meteor'
// symlink at the top level, there's no actual code in this file to write that
// symlink (it was just created by the bootstrap tarball release process).
/// We store a "warehouse" of tools, releases and packages on
/// disk. This warehouse is populated from our servers, as needed.
///
/// Directory structure:
///
/// meteor (relative path symlink to tools/latest/bin/meteor)
/// tools/ (not in checkout, since we run against checked-out code)
/// latest/ (relative path symlink to latest VERSION/ tools directory)
/// VERSION/
/// releases/
/// latest (relative path symlink to latest x.y.z.release.json)
/// x.y.z.release.json
/// x.y.z.notices.json
/// packages/
/// foo/
/// VERSION/
///
/// The warehouse is not used at all when running from a
/// checkout. Only local packages will be loaded (from
/// CHECKOUT/packages or within a directory in the PACKAGE_DIRS
/// environment variable). The setup of that is handled by release.js.
var path = require("path");
var fs = require("fs");
var os = require("os");
var Future = require("fibers/future");
var _ = require("underscore");
var files = require('./files.js');
var utils = require('./utils.js');
var updater = require('./updater.js');
var httpHelpers = require('./http-helpers.js');
var fiberHelpers = require('./fiber-helpers.js');
var WAREHOUSE_URLBASE = 'https://warehouse.meteor.com';
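// Editor-added sketch (not in the original file): a side-effect-free helper
// showing how the layout documented above maps to concrete paths. The
// release and package versions are invented placeholders.
var exampleWarehousePaths = function (warehouseDir) {
return {
meteorSymlink: path.join(warehouseDir, 'meteor'),
latestTools: path.join(warehouseDir, 'tools', 'latest'),
releaseManifest: path.join(warehouseDir, 'releases', '0.6.4.release.json'),
fooPackage: path.join(warehouseDir, 'packages', 'foo', '1a2b3c')
};
};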
var warehouse = exports;
_.extend(warehouse, {
// An exception meaning that you asked for a release that doesn't
// exist.
NoSuchReleaseError: function () {
},
// Return our loaded collection of tools, releases and
// packages. If we're running an installed version, found at
// $HOME/.meteor.
getWarehouseDir: function () {
// A hook for tests (and, in principle, for users).
if (process.env.METEOR_WAREHOUSE_DIR)
return process.env.METEOR_WAREHOUSE_DIR;
// This function should never be called unless we have a warehouse
// (an installed version, or with process.env.METEOR_WAREHOUSE_DIR
// set)
if (!files.usesWarehouse())
throw new Error("There's no warehouse in a git checkout");
return path.join(process.env.HOME, '.meteor');
},
getToolsDir: function (version) {
return path.join(warehouse.getWarehouseDir(), 'tools', version);
},
getToolsFreshFile: function (version) {
return path.join(warehouse.getWarehouseDir(), 'tools', version, '.fresh');
},
_latestReleaseSymlinkPath: function () {
return path.join(warehouse.getWarehouseDir(), 'releases', 'latest');
},
_latestToolsSymlinkPath: function () {
return path.join(warehouse.getWarehouseDir(), 'tools', 'latest');
},
// Ensure the passed release version is stored in the local
// warehouse and return its parsed manifest.
//
// If 'quiet' is true, don't print anything as we do it.
//
// Throws:
// - files.OfflineError if the release isn't cached locally and we
// are offline.
// - warehouse.NoSuchReleaseError if we talked to the server and it
// told us that no release named 'release' exists.
ensureReleaseExistsAndReturnManifest: function (release, quiet) {
if (!files.usesWarehouse())
throw new Error("Not in a warehouse but requesting a manifest!");
return warehouse._populateWarehouseForRelease(release, !quiet);
},
packageExistsInWarehouse: function (name, version) {
// A package exists if its directory exists. (We used to look for a
// particular file name ("package.js") inside the directory, but since we
// always install packages by untarring to a temporary directory and
// renaming atomically, we shouldn't worry about partial packages.)
return fs.existsSync(
path.join(warehouse.getWarehouseDir(), 'packages', name, version));
},
getPackageFreshFile: function (name, version) {
return path.join(warehouse.getWarehouseDir(), 'packages', name, version, '.fresh');
},
toolsExistsInWarehouse: function (version) {
return fs.existsSync(warehouse.getToolsDir(version));
},
_calculateNewPiecesForRelease: function (releaseManifest) {
// newPieces.tools and newPieces.packages[PACKAGE] are either falsey (if
// nothing is new), or an object with keys "version" and bool
// "needsDownload". "needsDownload" is true if the piece is not in the
// warehouse, and is false if it's in the warehouse but has never been used.
var newPieces = {
tools: null,
packages: {}
};
// populate warehouse with tools version for this release
var toolsVersion = releaseManifest.tools;
if (!warehouse.toolsExistsInWarehouse(toolsVersion)) {
newPieces.tools = {version: toolsVersion, needsDownload: true};
} else if (fs.existsSync(warehouse.getToolsFreshFile(toolsVersion))) {
newPieces.tools = {version: toolsVersion, needsDownload: false};
}
_.each(releaseManifest.packages, function (version, name) {
if (!warehouse.packageExistsInWarehouse(name, version)) {
newPieces.packages[name] = {version: version, needsDownload: true};
} else if (fs.existsSync(warehouse.getPackageFreshFile(name, version))) {
newPieces.packages[name] = {version: version, needsDownload: false};
}
});
if (newPieces.tools || !_.isEmpty(newPieces.packages))
return newPieces;
return null;
},
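// Editor-added illustration (versions invented): a typical non-null return is
// { tools: { version: 'a1b2c3', needsDownload: true },
// packages: { livedata: { version: 'd4e5f6', needsDownload: false } } }
// where needsDownload:false marks a piece already on disk but never used.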
_packageUpdatesMessage: function (packageNames) {
var lines = [];
var width = 80; // see library.formatList for why we hardcode this
var currentLine = ' * Package updates:';
_.each(packageNames, function (name) {
if (currentLine.length + 1 + name.length <= width) {
currentLine += ' ' + name;
} else {
lines.push(currentLine);
currentLine = ' ' + name;
}
});
lines.push(currentLine);
return lines.join('\n');
},
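// Editor-added illustration (package names invented): for
// ['accounts-base', 'livedata', 'mongo-livedata'] the function above yields
// " * Package updates: accounts-base livedata mongo-livedata", starting an
// indented continuation line whenever a line would pass 80 columns.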
// fetches the manifest file for the given release version. also fetches
// all of the missing versioned packages referenced from the release manifest
// @param releaseVersion {String} e.g. "0.1"
_populateWarehouseForRelease: function (releaseVersion, showInstalling) {
var future = new Future;
var releasesDir = path.join(warehouse.getWarehouseDir(), 'releases');
files.mkdir_p(releasesDir, 0755);
var releaseManifestPath = path.join(releasesDir,
releaseVersion + '.release.json');
// If the release already exists, we don't have to do anything, except maybe
// print a message if this release has never been used before (and we only
// have it due to a background download).
var releaseAlreadyExists = true;
try {
var releaseManifestText = fs.readFileSync(releaseManifestPath);
} catch (e) {
releaseAlreadyExists = false;
}
// Now get release manifest if we don't already have it, but only write it
// after we're done writing packages
if (!releaseAlreadyExists) {
// For automated self-test. If METEOR_TEST_FAIL_RELEASE_DOWNLOAD
// is 'offline' or 'not-found', make release downloads fail.
if (process.env.METEOR_TEST_FAIL_RELEASE_DOWNLOAD === "offline")
throw new files.OfflineError(new Error("scripted failure for tests"));
if (process.env.METEOR_TEST_FAIL_RELEASE_DOWNLOAD === "not-found")
throw new warehouse.NoSuchReleaseError;
try {
var result = httpHelpers.request(
WAREHOUSE_URLBASE + "/releases/" + releaseVersion + ".release.json");
} catch (e) {
throw new files.OfflineError(e);
}
if (result.response.statusCode !== 200)
// We actually got some response, so we're probably online and we
// just can't find the release.
throw new warehouse.NoSuchReleaseError;
releaseManifestText = result.body;
}
var releaseManifest = JSON.parse(releaseManifestText);
var newPieces = warehouse._calculateNewPiecesForRelease(releaseManifest);
if (releaseAlreadyExists && !newPieces)
return releaseManifest;
if (newPieces && showInstalling) {
console.log("Installing Meteor %s:", releaseVersion);
if (newPieces.tools) {
console.log(" * 'meteor' build tool (version %s)",
newPieces.tools.version);
}
if (!_.isEmpty(newPieces.packages)) {
console.log(warehouse._packageUpdatesMessage(
_.keys(newPieces.packages).sort()));
}
console.log();
}
if (!releaseAlreadyExists) {
if (newPieces && newPieces.tools && newPieces.tools.needsDownload) {
try {
warehouse.downloadToolsToWarehouse(
newPieces.tools.version,
warehouse._platform(),
warehouse.getWarehouseDir());
} catch (e) {
if (showInstalling)
console.error("Failed to load tools for release " + releaseVersion);
throw e;
}
// If the 'tools/latest' symlink doesn't exist, this must be the first
// legacy tools we've downloaded into this warehouse. Add the symlink,
// so that the tools doesn't get confused when it tries to readlink it.
if (!fs.existsSync(warehouse._latestToolsSymlinkPath())) {
fs.symlinkSync(newPieces.tools.version,
warehouse._latestToolsSymlinkPath());
}
}
var packagesToDownload = {};
_.each(newPieces && newPieces.packages, function (packageInfo, name) {
if (packageInfo.needsDownload)
packagesToDownload[name] = packageInfo.version;
});
if (!_.isEmpty(packagesToDownload)) {
try {
warehouse.downloadPackagesToWarehouse(packagesToDownload,
warehouse._platform(),
warehouse.getWarehouseDir());
} catch (e) {
if (showInstalling)
console.error("Failed to load packages for release " +
releaseVersion);
throw e;
}
}
// try getting the release's notices. only blessed releases have one, so
// if we can't find it just proceed.
try {
var notices = httpHelpers.getUrl(
WAREHOUSE_URLBASE + "/releases/" + releaseVersion + ".notices.json");
// Real notices are valid JSON.
JSON.parse(notices);
fs.writeFileSync(
path.join(releasesDir, releaseVersion + '.notices.json'), notices);
} catch (e) {
// no notices, proceed
}
// Now that we have written all packages, it's safe to write the
// release manifest.
fs.writeFileSync(releaseManifestPath, releaseManifestText);
// If the 'releases/latest' symlink doesn't exist, this must be the first
// legacy release manifest we've downloaded into this warehouse. Add the
// symlink, so that the tools doesn't get confused when it tries to
// readlink it.
if (!fs.existsSync(warehouse._latestReleaseSymlinkPath())) {
fs.symlinkSync(releaseVersion + '.release.json',
warehouse._latestReleaseSymlinkPath());
}
}
// Finally, clear the "fresh" files for all the things we just printed
// (whether or not we just downloaded them). (Don't do this if we didn't
// print the installing message!)
if (newPieces && showInstalling) {
var unlinkIfExists = function (file) {
try {
fs.unlinkSync(file);
} catch (e) {
// If two processes populate the warehouse in parallel, the other
// process may have deleted the fresh file. That's OK!
if (e.code === "ENOENT")
return;
throw e;
}
};
if (newPieces.tools) {
unlinkIfExists(warehouse.getToolsFreshFile(newPieces.tools.version));
}
_.each(newPieces.packages, function (packageInfo, name) {
unlinkIfExists(
warehouse.getPackageFreshFile(name, packageInfo.version));
});
}
return releaseManifest;
},
// this function is also used by bless-release.js
downloadToolsToWarehouse: function (
toolsVersion, platform, warehouseDirectory, dontWriteFreshFile) {
// XXX this sucks. We store all the tarballs in memory. This is huge.
// We should instead stream packages in parallel. Since the node stream
// API is in flux, we should probably wait a bit.
// http://blog.nodejs.org/2012/12/20/streams2/
var toolsTarballFilename =
"meteor-tools-" + toolsVersion + "-" + platform + ".tar.gz";
var toolsTarballPath = "/tools/" + toolsVersion + "/"
+ toolsTarballFilename;
var toolsTarball = httpHelpers.getUrl({
url: WAREHOUSE_URLBASE + toolsTarballPath,
encoding: null
});
files.extractTarGz(toolsTarball,
path.join(warehouseDirectory, 'tools', toolsVersion));
if (!dontWriteFreshFile)
fs.writeFileSync(warehouse.getToolsFreshFile(toolsVersion), '');
},
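// Editor's note: for toolsVersion 'a1b2c3' on 64-bit Linux (version invented),
// the code above assembles and fetches
// https://warehouse.meteor.com/tools/a1b2c3/meteor-tools-a1b2c3-Linux_x86_64.tar.gz
// and unpacks it under tools/a1b2c3/ in the warehouse directory.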
// this function is also used by bless-release.js
downloadPackagesToWarehouse: function (packagesToDownload,
platform,
warehouseDirectory,
dontWriteFreshFile) {
fiberHelpers.parallelEach(
packagesToDownload, function (version, name) {
var packageDir = path.join(
warehouseDirectory, 'packages', name, version);
var packageUrl = WAREHOUSE_URLBASE + "/packages/" + name +
"/" + version +
"/" + name + '-' + version + "-" + platform + ".tar.gz";
var tarball = httpHelpers.getUrl({url: packageUrl, encoding: null});
files.extractTarGz(tarball, packageDir);
if (!dontWriteFreshFile)
fs.writeFileSync(warehouse.getPackageFreshFile(name, version), '');
});
},
_platform: function () {
// Normalize from Node "os.arch()" to "uname -m".
var arch = os.arch();
if (arch === "ia32")
arch = "i686";
else if (arch === "x64")
arch = "x86_64";
else
throw new Error("Unsupported architecture " + arch);
return os.type() + "_" + arch;
}
});
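// Editor-added usage sketch (kept as a comment to avoid network side effects;
// the release version is invented and METEOR_WAREHOUSE_DIR is assumed to
// point at a writable directory):
// var manifest = warehouse.ensureReleaseExistsAndReturnManifest('0.6.4', true);
// console.log(manifest.tools, Object.keys(manifest.packages).length);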
| tools/warehouse.js | // This file is used to access the "warehouse" of pre-0.9.0 releases. This code
// is now legacy, but we keep it around so that you can still use the same
// `meteor` entry point to run pre-0.9.0 and post-0.9.0 releases, for now. All
// it knows how to do is download old releases and explain to main.js how to
// exec them.
//
// Because of this, we do have to be careful that the files used by this code
// and the files used by tropohouse.js (the modern version of the warehouse)
// don't overlap. tropohouse does not use tools or releases directories, and
// while they both have packages directories with similar structures, the
// version names should not overlap: warehouse versions are SHAs and tropohouse
// versions are semvers. Additionally, while they do both use the 'meteor'
// symlink at the top level, there's no actual code in this file to write that
// symlink (it was just created by the bootstrap tarball release process).
/// We store a "warehouse" of tools, releases and packages on
/// disk. This warehouse is populated from our servers, as needed.
///
/// Directory structure:
///
/// meteor (relative path symlink to tools/latest/bin/meteor)
/// tools/ (not in checkout, since we run against checked-out code)
/// latest/ (relative path symlink to latest VERSION/ tools directory)
/// VERSION/
/// releases/
/// latest (relative path symlink to latest x.y.z.release.json)
/// x.y.z.release.json
/// x.y.z.notices.json
/// packages/
/// foo/
/// VERSION/
///
/// The warehouse is not used at all when running from a
/// checkout. Only local packages will be loaded (from
/// CHECKOUT/packages or within a directory in the PACKAGE_DIRS
/// environment variable). The setup of that is handled by release.js.
var path = require("path");
var fs = require("fs");
var os = require("os");
var Future = require("fibers/future");
var _ = require("underscore");
var files = require('./files.js');
var utils = require('./utils.js');
var updater = require('./updater.js');
var httpHelpers = require('./http-helpers.js');
var fiberHelpers = require('./fiber-helpers.js');
var WAREHOUSE_URLBASE = 'https://warehouse.meteor.com';
var warehouse = exports;
_.extend(warehouse, {
// An exception meaning that you asked for a release that doesn't
// exist.
NoSuchReleaseError: function () {
},
// Return our loaded collection of tools, releases and
// packages. If we're running an installed version, found at
// $HOME/.meteor.
getWarehouseDir: function () {
// A hook for tests (and, in principle, for users).
if (process.env.METEOR_WAREHOUSE_DIR)
return process.env.METEOR_WAREHOUSE_DIR;
// This function should never be called unless we have a warehouse
// (an installed version, or with process.env.METEOR_WAREHOUSE_DIR
// set)
if (!files.usesWarehouse())
throw new Error("There's no warehouse in a git checkout");
return path.join(process.env.HOME, '.meteor');
},
getToolsDir: function (version) {
return path.join(warehouse.getWarehouseDir(), 'tools', version);
},
getToolsFreshFile: function (version) {
return path.join(warehouse.getWarehouseDir(), 'tools', version, '.fresh');
},
// Ensure the passed release version is stored in the local
// warehouse and return its parsed manifest.
//
// If 'quiet' is true, don't print anything as we do it.
//
// Throws:
// - files.OfflineError if the release isn't cached locally and we
// are offline.
// - warehouse.NoSuchReleaseError if we talked to the server and it
// told us that no release named 'release' exists.
ensureReleaseExistsAndReturnManifest: function (release, quiet) {
if (!files.usesWarehouse())
throw new Error("Not in a warehouse but requesting a manifest!");
return warehouse._populateWarehouseForRelease(release, !quiet);
},
packageExistsInWarehouse: function (name, version) {
// A package exists if its directory exists. (We used to look for a
// particular file name ("package.js") inside the directory, but since we
// always install packages by untarring to a temporary directory and
// renaming atomically, we shouldn't worry about partial packages.)
return fs.existsSync(
path.join(warehouse.getWarehouseDir(), 'packages', name, version));
},
getPackageFreshFile: function (name, version) {
return path.join(warehouse.getWarehouseDir(), 'packages', name, version, '.fresh');
},
toolsExistsInWarehouse: function (version) {
return fs.existsSync(warehouse.getToolsDir(version));
},
_calculateNewPiecesForRelease: function (releaseManifest) {
// newPieces.tools and newPieces.packages[PACKAGE] are either falsey (if
// nothing is new), or an object with keys "version" and bool
// "needsDownload". "needsDownload" is true if the piece is not in the
// warehouse, and is false if it's in the warehouse but has never been used.
var newPieces = {
tools: null,
packages: {}
};
// populate warehouse with tools version for this release
var toolsVersion = releaseManifest.tools;
if (!warehouse.toolsExistsInWarehouse(toolsVersion)) {
newPieces.tools = {version: toolsVersion, needsDownload: true};
} else if (fs.existsSync(warehouse.getToolsFreshFile(toolsVersion))) {
newPieces.tools = {version: toolsVersion, needsDownload: false};
}
_.each(releaseManifest.packages, function (version, name) {
if (!warehouse.packageExistsInWarehouse(name, version)) {
newPieces.packages[name] = {version: version, needsDownload: true};
} else if (fs.existsSync(warehouse.getPackageFreshFile(name, version))) {
newPieces.packages[name] = {version: version, needsDownload: false};
}
});
if (newPieces.tools || !_.isEmpty(newPieces.packages))
return newPieces;
return null;
},
_packageUpdatesMessage: function (packageNames) {
var lines = [];
var width = 80; // see library.formatList for why we hardcode this
var currentLine = ' * Package updates:';
_.each(packageNames, function (name) {
if (currentLine.length + 1 + name.length <= width) {
currentLine += ' ' + name;
} else {
lines.push(currentLine);
currentLine = ' ' + name;
}
});
lines.push(currentLine);
return lines.join('\n');
},
// fetches the manifest file for the given release version. also fetches
// all of the missing versioned packages referenced from the release manifest
// @param releaseVersion {String} e.g. "0.1"
_populateWarehouseForRelease: function (releaseVersion, showInstalling) {
var future = new Future;
var releasesDir = path.join(warehouse.getWarehouseDir(), 'releases');
files.mkdir_p(releasesDir, 0755);
var releaseManifestPath = path.join(releasesDir,
releaseVersion + '.release.json');
// If the release already exists, we don't have to do anything, except maybe
// print a message if this release has never been used before (and we only
// have it due to a background download).
var releaseAlreadyExists = true;
try {
var releaseManifestText = fs.readFileSync(releaseManifestPath);
} catch (e) {
releaseAlreadyExists = false;
}
// Now get release manifest if we don't already have it, but only write it
// after we're done writing packages
if (!releaseAlreadyExists) {
// For automated self-test. If METEOR_TEST_FAIL_RELEASE_DOWNLOAD
// is 'offline' or 'not-found', make release downloads fail.
if (process.env.METEOR_TEST_FAIL_RELEASE_DOWNLOAD === "offline")
throw new files.OfflineError(new Error("scripted failure for tests"));
if (process.env.METEOR_TEST_FAIL_RELEASE_DOWNLOAD === "not-found")
throw new warehouse.NoSuchReleaseError;
try {
var result = httpHelpers.request(
WAREHOUSE_URLBASE + "/releases/" + releaseVersion + ".release.json");
} catch (e) {
throw new files.OfflineError(e);
}
if (result.response.statusCode !== 200)
// We actually got some response, so we're probably online and we
// just can't find the release.
throw new warehouse.NoSuchReleaseError;
releaseManifestText = result.body;
}
var releaseManifest = JSON.parse(releaseManifestText);
var newPieces = warehouse._calculateNewPiecesForRelease(releaseManifest);
if (releaseAlreadyExists && !newPieces)
return releaseManifest;
if (newPieces && showInstalling) {
console.log("Installing Meteor %s:", releaseVersion);
if (newPieces.tools) {
console.log(" * 'meteor' build tool (version %s)",
newPieces.tools.version);
}
if (!_.isEmpty(newPieces.packages)) {
console.log(warehouse._packageUpdatesMessage(
_.keys(newPieces.packages).sort()));
}
console.log();
}
if (!releaseAlreadyExists) {
if (newPieces && newPieces.tools && newPieces.tools.needsDownload) {
try {
warehouse.downloadToolsToWarehouse(
newPieces.tools.version,
warehouse._platform(),
warehouse.getWarehouseDir());
} catch (e) {
if (showInstalling)
console.error("Failed to load tools for release " + releaseVersion);
throw e;
}
}
var packagesToDownload = {};
_.each(newPieces && newPieces.packages, function (packageInfo, name) {
if (packageInfo.needsDownload)
packagesToDownload[name] = packageInfo.version;
});
if (!_.isEmpty(packagesToDownload)) {
try {
warehouse.downloadPackagesToWarehouse(packagesToDownload,
warehouse._platform(),
warehouse.getWarehouseDir());
} catch (e) {
if (showInstalling)
console.error("Failed to load packages for release " +
releaseVersion);
throw e;
}
}
// try getting the release's notices. only blessed releases have one, so
// if we can't find it just proceed.
try {
var notices = httpHelpers.getUrl(
WAREHOUSE_URLBASE + "/releases/" + releaseVersion + ".notices.json");
// Real notices are valid JSON.
JSON.parse(notices);
fs.writeFileSync(
path.join(releasesDir, releaseVersion + '.notices.json'), notices);
} catch (e) {
// no notices, proceed
}
// Now that we have written all packages, it's safe to write the
// release manifest.
fs.writeFileSync(releaseManifestPath, releaseManifestText);
}
// Finally, clear the "fresh" files for all the things we just printed
// (whether or not we just downloaded them). (Don't do this if we didn't
// print the installing message!)
if (newPieces && showInstalling) {
var unlinkIfExists = function (file) {
try {
fs.unlinkSync(file);
} catch (e) {
// If two processes populate the warehouse in parallel, the other
// process may have deleted the fresh file. That's OK!
if (e.code === "ENOENT")
return;
throw e;
}
};
if (newPieces.tools) {
unlinkIfExists(warehouse.getToolsFreshFile(newPieces.tools.version));
}
_.each(newPieces.packages, function (packageInfo, name) {
unlinkIfExists(
warehouse.getPackageFreshFile(name, packageInfo.version));
});
}
return releaseManifest;
},
// this function is also used by bless-release.js
downloadToolsToWarehouse: function (
toolsVersion, platform, warehouseDirectory, dontWriteFreshFile) {
// XXX this sucks. We store all the tarballs in memory. This is huge.
// We should instead stream packages in parallel. Since the node stream
// API is in flux, we should probably wait a bit.
// http://blog.nodejs.org/2012/12/20/streams2/
var toolsTarballFilename =
"meteor-tools-" + toolsVersion + "-" + platform + ".tar.gz";
var toolsTarballPath = "/tools/" + toolsVersion + "/"
+ toolsTarballFilename;
var toolsTarball = httpHelpers.getUrl({
url: WAREHOUSE_URLBASE + toolsTarballPath,
encoding: null
});
files.extractTarGz(toolsTarball,
path.join(warehouseDirectory, 'tools', toolsVersion));
if (!dontWriteFreshFile)
fs.writeFileSync(warehouse.getToolsFreshFile(toolsVersion), '');
},
// this function is also used by bless-release.js
downloadPackagesToWarehouse: function (packagesToDownload,
platform,
warehouseDirectory,
dontWriteFreshFile) {
fiberHelpers.parallelEach(
packagesToDownload, function (version, name) {
var packageDir = path.join(
warehouseDirectory, 'packages', name, version);
var packageUrl = WAREHOUSE_URLBASE + "/packages/" + name +
"/" + version +
"/" + name + '-' + version + "-" + platform + ".tar.gz";
var tarball = httpHelpers.getUrl({url: packageUrl, encoding: null});
files.extractTarGz(tarball, packageDir);
if (!dontWriteFreshFile)
fs.writeFileSync(warehouse.getPackageFreshFile(name, version), '');
});
},
_platform: function () {
// Normalize from Node "os.arch()" to "uname -m".
var arch = os.arch();
if (arch === "ia32")
arch = "i686";
else if (arch === "x64")
arch = "x86_64";
else
throw new Error("Unsupported architecture " + arch);
return os.type() + "_" + arch;
}
});
| ensure that latest symlinks exist
otherwise when we legacy-springboard it will be sad
| tools/warehouse.js | ensure that latest symlinks exist | <ide><path>ools/warehouse.js
<ide> return path.join(warehouse.getWarehouseDir(), 'tools', version, '.fresh');
<ide> },
<ide>
<add> _latestReleaseSymlinkPath: function () {
<add> return path.join(warehouse.getWarehouseDir(), 'releases', 'latest');
<add> },
<add>
<add> _latestToolsSymlinkPath: function () {
<add> return path.join(warehouse.getWarehouseDir(), 'tools', 'latest');
<add> },
<add>
<ide> // Ensure the passed release version is stored in the local
<ide> // warehouse and return its parsed manifest.
<ide> //
<ide> if (showInstalling)
<ide> console.error("Failed to load tools for release " + releaseVersion);
<ide> throw e;
<add> }
<add>
<add> // If the 'tools/latest' symlink doesn't exist, this must be the first
<add> // legacy tools we've downloaded into this warehouse. Add the symlink,
<add> // so that the tools doesn't get confused when it tries to readlink it.
<add> if (!fs.existsSync(warehouse._latestToolsSymlinkPath())) {
<add> fs.symlinkSync(newPieces.tools.version,
<add> warehouse._latestToolsSymlinkPath());
<ide> }
<ide> }
<ide>
<ide> // Now that we have written all packages, it's safe to write the
<ide> // release manifest.
<ide> fs.writeFileSync(releaseManifestPath, releaseManifestText);
<add>
<add> // If the 'releases/latest' symlink doesn't exist, this must be the first
<add> // legacy release manifest we've downloaded into this warehouse. Add the
<add> // symlink, so that the tools doesn't get confused when it tries to
<add> // readlink it.
<add> if (!fs.existsSync(warehouse._latestReleaseSymlinkPath())) {
<add> fs.symlinkSync(releaseVersion + '.release.json',
<add> warehouse._latestReleaseSymlinkPath());
<add> }
<ide> }
<ide>
<ide> // Finally, clear the "fresh" files for all the things we just printed |
|
Java | mit | edb2dfcc28e546c17928fc7b3e1a56604fcf65f6 | 0 | openfact/openfact,openfact/openfact-temp,openfact/openfact-pe,openfact/openfact-pe,openfact/openfact,openfact/openfact-temp,openfact/openfact-temp,openfact/openfact,openfact/openfact-pe,openfact/openfact-temp | package org.openfact.ubl.send.pe;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.activation.DataHandler;
import javax.activation.DataSource;
import javax.activation.FileDataSource;
import javax.xml.transform.TransformerException;
import org.openfact.models.ModelException;
import org.openfact.models.OpenfactSession;
import org.openfact.models.OrganizationModel;
import org.openfact.models.ubl.CreditNoteModel;
import org.openfact.models.ubl.DebitNoteModel;
import org.openfact.models.ubl.InvoiceModel;
import org.openfact.models.utils.DocumentUtils;
import org.openfact.ubl.UblProvider;
import org.openfact.ubl.pe.constants.CodigoTipoDocumento;
import org.openfact.ubl.send.UblSenderException;
import org.openfact.ubl.send.UblSenderProvider;
import org.openfact.ubl.send.UblTemplateProvider;
import org.openfact.ubl.send.pe.header.UblHeaderHandlerResolver;
import org.openfact.ubl.send.pe.sunat.BillService;
import org.openfact.ubl.send.pe.sunat.BillService_Service;
import org.w3c.dom.Document;
import com.sun.xml.ws.util.ByteArrayDataSource;
import jodd.io.ZipBuilder;
public class UblTemplateProvider_PE implements UblTemplateProvider {
private OpenfactSession session;
private OrganizationModel organization;
public UblTemplateProvider_PE(OpenfactSession session) {
this.session = session;
}
@Override
public void close() {
}
@Override
public UblTemplateProvider setOrganization(OrganizationModel organization) {
this.organization = organization;
return this;
}
@Override
public void send(String type) throws UblSenderException {
throw new ModelException("method not implemented");
}
@Override
public void sendInvoice(InvoiceModel invoice) throws UblSenderException {
String fileName = generateXmlFileName(invoice);
Document document = getUblProvider(organization).getDocument(organization, invoice);
try {
byte[] zip = generateZip(document, fileName);
File file = new File("/home/admin/ubl/"+fileName+".zip");
FileOutputStream fos = new FileOutputStream(file);
fos.write(zip);
fos.close();
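// Editor's note: the File/FileOutputStream lines above only dump a debug
// copy of the zip to a hardcoded path (and the stream is not closed if the
// write throws); the SOAP call below does not depend on them. The same
// pattern repeats in sendCreditNote and sendDebitNote.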
// Call Web Service Operation
BillService_Service service = new BillService_Service();
service.setHandlerResolver(new UblHeaderHandlerResolver(organization.getUblSenderConfig()));
BillService port = service.getBillServicePort();
// Config data
DataSource dataSource = new ByteArrayDataSource(zip, "application/zip");
DataHandler contentFile = new DataHandler(dataSource);
// Send
byte[] result = port.sendBill(fileName + ".zip", contentFile);
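// Editor's note: "result" presumably carries SUNAT's response (the CDR zip
// for accepted documents); it is currently discarded without inspection.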
} catch (TransformerException e) {
throw new UblSenderException(e);
} catch (IOException e) {
throw new UblSenderException(e);
}
}
@Override
public void sendCreditNote(CreditNoteModel creditNote) throws UblSenderException {
String fileName = generateXmlFileName(creditNote);
Document document = getUblProvider(organization).getDocument(organization, creditNote);
try {
byte[] zip = generateZip(document, fileName);
File file = new File("/home/admin/ubl/"+fileName+".zip");
FileOutputStream fos = new FileOutputStream(file);
fos.write(zip);
fos.close();
// Call Web Service Operation
BillService_Service service = new BillService_Service();
service.setHandlerResolver(new UblHeaderHandlerResolver(organization.getUblSenderConfig()));
BillService port = service.getBillServicePort();
// Config data
DataSource dataSource = new ByteArrayDataSource(zip, "application/zip");
DataHandler contentFile = new DataHandler(dataSource);
// Send
byte[] result = port.sendBill(fileName + ".zip", contentFile);
} catch (TransformerException e) {
throw new UblSenderException(e);
} catch (IOException e) {
throw new UblSenderException(e);
}
}
@Override
public void sendDebitNote(DebitNoteModel debitNote) throws UblSenderException {
String fileName = generateXmlFileName(debitNote);
Document document = getUblProvider(organization).getDocument(organization, debitNote);
try {
byte[] zip = generateZip(document, fileName);
File file = new File("/home/admin/ubl/"+fileName+".zip");
FileOutputStream fos = new FileOutputStream(file);
fos.write(zip);
fos.close();
// Call Web Service Operation
BillService_Service service = new BillService_Service();
service.setHandlerResolver(new UblHeaderHandlerResolver(organization.getUblSenderConfig()));
BillService port = service.getBillServicePort();
// Config data
DataSource dataSource = new ByteArrayDataSource(zip, "application/zip");
DataHandler contentFile = new DataHandler(dataSource);
// Send
byte[] result = port.sendBill(fileName + ".zip", contentFile);
} catch (TransformerException e) {
throw new UblSenderException(e);
} catch (IOException e) {
throw new UblSenderException(e);
}
}
private byte[] generateZip(Document document, String fileName) throws TransformerException, IOException {
byte[] bytes = DocumentUtils.getBytesFromDocument(document);
return ZipBuilder.createZipInMemory().addFolder("dummy/").add(bytes).path(fileName + ".xml").save().toBytes();
}
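// Editor's note: the zip built above contains an empty "dummy/" folder plus
// the XML at "<fileName>.xml"; the extra folder appears to be a packaging
// quirk expected by SUNAT's reception service.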
private UblProvider getUblProvider(OrganizationModel organization) {
return session.getProvider(UblProvider.class, organization.getDefaultUblLocale());
}
private void send(String fileName, byte[] file, String contentType) throws UblSenderException {
UblSenderProvider ublSender = session.getProvider(UblSenderProvider.class, "soap");
ublSender.send(organization, fileName, file, contentType);
}
private String generateXmlFileName(InvoiceModel invoice) throws UblSenderException {
if (organization.getAssignedIdentificationId() == null) {
throw new UblSenderException("Organization doesn't have assignedIdentificationId", new Throwable());
}
String codido;
if (invoice.getInvoiceTypeCode().equals(CodigoTipoDocumento.FACTURA.getCodigo())) {
codido = CodigoTipoDocumento.FACTURA.getCodigo();
} else if (invoice.getInvoiceTypeCode().equals(CodigoTipoDocumento.BOLETA.getCodigo())) {
codido = CodigoTipoDocumento.BOLETA.getCodigo();
} else {
throw new UblSenderException("Invalid invoice code", new Throwable());
}
StringBuilder sb = new StringBuilder();
sb.append(organization.getAssignedIdentificationId()).append("-");
sb.append(codido).append("-");
sb.append(invoice.getID());
return sb.toString();
}
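// Editor-added example (RUC and series invented): with
// assignedIdentificationId "20123456789" and an invoice whose ID is
// "F001-00000123" and whose type code matches CodigoTipoDocumento.FACTURA
// (conventionally "01" under SUNAT), this yields
// "20123456789-01-F001-00000123".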
private String generateXmlFileName(CreditNoteModel creditNote) throws UblSenderException {
if (organization.getAssignedIdentificationId() == null) {
throw new UblSenderException("Organization doesn't have assignedIdentificationId", new Throwable());
}
String codido = CodigoTipoDocumento.NOTA_CREDITO.getCodigo();
StringBuilder sb = new StringBuilder();
sb.append(organization.getAssignedIdentificationId()).append("-");
sb.append(codido).append("-");
sb.append(creditNote.getID());
return sb.toString();
}
private String generateXmlFileName(DebitNoteModel debitNote) throws UblSenderException {
if (organization.getAssignedIdentificationId() == null) {
throw new UblSenderException("Organization doesn't have assignedIdentificationId", new Throwable());
}
String codido = CodigoTipoDocumento.NOTA_DEBITO.getCodigo();
StringBuilder sb = new StringBuilder();
sb.append(organization.getAssignedIdentificationId()).append("-");
sb.append(codido).append("-");
sb.append(debitNote.getID());
return sb.toString();
}
}
| services/src/main/java/org/openfact/ubl/send/pe/UblTemplateProvider_PE.java | package org.openfact.ubl.send.pe;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.activation.DataHandler;
import javax.activation.DataSource;
import javax.activation.FileDataSource;
import javax.xml.transform.TransformerException;
import org.openfact.models.ModelException;
import org.openfact.models.OpenfactSession;
import org.openfact.models.OrganizationModel;
import org.openfact.models.ubl.CreditNoteModel;
import org.openfact.models.ubl.DebitNoteModel;
import org.openfact.models.ubl.InvoiceModel;
import org.openfact.models.utils.DocumentUtils;
import org.openfact.ubl.UblProvider;
import org.openfact.ubl.pe.constants.CodigoTipoDocumento;
import org.openfact.ubl.send.UblSenderException;
import org.openfact.ubl.send.UblSenderProvider;
import org.openfact.ubl.send.UblTemplateProvider;
import org.openfact.ubl.send.pe.header.UblHeaderHandlerResolver;
import org.openfact.ubl.send.pe.sunat.BillService;
import org.openfact.ubl.send.pe.sunat.BillService_Service;
import org.w3c.dom.Document;
import com.sun.xml.ws.util.ByteArrayDataSource;
import jodd.io.ZipBuilder;
public class UblTemplateProvider_PE implements UblTemplateProvider {
private OpenfactSession session;
private OrganizationModel organization;
public UblTemplateProvider_PE(OpenfactSession session) {
this.session = session;
}
@Override
public void close() {
}
@Override
public UblTemplateProvider setOrganization(OrganizationModel organization) {
this.organization = organization;
return this;
}
@Override
public void send(String type) throws UblSenderException {
throw new ModelException("method not implemented");
}
@Override
public void sendInvoice(InvoiceModel invoice) throws UblSenderException {
String fileName = generateXmlFileName(invoice);
Document document = getUblProvider(organization).getDocument(organization, invoice);
try {
byte[] zip = generateZip(document, fileName);
// Call Web Service Operation
BillService_Service service = new BillService_Service();
service.setHandlerResolver(new UblHeaderHandlerResolver(organization.getUblSenderConfig()));
BillService port = service.getBillServicePort();
// Config data
DataSource dataSource = new ByteArrayDataSource(zip, "application/zip");
DataHandler contentFile = new DataHandler(dataSource);
// Send
byte[] result = port.sendBill(fileName + ".zip", contentFile);
} catch (TransformerException e) {
throw new UblSenderException(e);
} catch (IOException e) {
throw new UblSenderException(e);
}
}
@Override
public void sendCreditNote(CreditNoteModel creditNote) throws UblSenderException {
String fileName = generateXmlFileName(creditNote);
Document document = getUblProvider(organization).getDocument(organization, creditNote);
try {
byte[] zip = generateZip(document, fileName);
// Call Web Service Operation
BillService_Service service = new BillService_Service();
service.setHandlerResolver(new UblHeaderHandlerResolver(organization.getUblSenderConfig()));
BillService port = service.getBillServicePort();
// Config data
DataSource dataSource = new ByteArrayDataSource(zip, "application/zip");
DataHandler contentFile = new DataHandler(dataSource);
// Send
byte[] result = port.sendBill(fileName + ".zip", contentFile);
} catch (TransformerException e) {
throw new UblSenderException(e);
} catch (IOException e) {
throw new UblSenderException(e);
}
}
@Override
public void sendDebitNote(DebitNoteModel debitNote) throws UblSenderException {
String fileName = generateXmlFileName(debitNote);
Document document = getUblProvider(organization).getDocument(organization, debitNote);
try {
byte[] zip = generateZip(document, fileName);
// Call Web Service Operation
BillService_Service service = new BillService_Service();
service.setHandlerResolver(new UblHeaderHandlerResolver(organization.getUblSenderConfig()));
BillService port = service.getBillServicePort();
// Config data
DataSource dataSource = new ByteArrayDataSource(zip, "application/zip");
DataHandler contentFile = new DataHandler(dataSource);
// Send
byte[] result = port.sendBill(fileName + ".zip", contentFile);
} catch (TransformerException e) {
throw new UblSenderException(e);
} catch (IOException e) {
throw new UblSenderException(e);
}
}
private byte[] generateZip(Document document, String fileName) throws TransformerException, IOException {
byte[] bytes = DocumentUtils.getBytesFromDocument(document);
return ZipBuilder.createZipInMemory().addFolder("dummy/").add(bytes).path(fileName + ".xml").save().toBytes();
}
private UblProvider getUblProvider(OrganizationModel organization) {
return session.getProvider(UblProvider.class, organization.getDefaultUblLocale());
}
private void send(String fileName, byte[] file, String contentType) throws UblSenderException {
UblSenderProvider ublSender = session.getProvider(UblSenderProvider.class, "soap");
ublSender.send(organization, fileName, file, contentType);
}
private String generateXmlFileName(InvoiceModel invoice) throws UblSenderException {
if (organization.getAssignedIdentificationId() == null) {
throw new UblSenderException("Organization doesn't have assignedIdentificationId", new Throwable());
}
String codido;
if (invoice.getInvoiceTypeCode().equals(CodigoTipoDocumento.FACTURA.getCodigo())) {
codido = CodigoTipoDocumento.FACTURA.getCodigo();
} else if (invoice.getInvoiceTypeCode().equals(CodigoTipoDocumento.BOLETA.getCodigo())) {
codido = CodigoTipoDocumento.BOLETA.getCodigo();
} else {
throw new UblSenderException("Invalid invoice code", new Throwable());
}
StringBuilder sb = new StringBuilder();
sb.append(organization.getAssignedIdentificationId()).append("-");
sb.append(codido).append("-");
sb.append(invoice.getID());
return sb.toString();
}
private String generateXmlFileName(CreditNoteModel creditNote) throws UblSenderException {
if (organization.getAssignedIdentificationId() == null) {
throw new UblSenderException("Organization doesn't have assignedIdentificationId", new Throwable());
}
String codido = CodigoTipoDocumento.NOTA_CREDITO.getCodigo();
StringBuilder sb = new StringBuilder();
sb.append(organization.getAssignedIdentificationId()).append("-");
sb.append(codido).append("-");
sb.append(creditNote.getID());
sb.append(".xml");
return sb.toString();
}
private String generateXmlFileName(DebitNoteModel debitNote) throws UblSenderException {
if (organization.getAssignedIdentificationId() == null) {
throw new UblSenderException("Organization doesn't have assignedIdentificationId", new Throwable());
}
String codido = CodigoTipoDocumento.NOTA_DEBITO.getCodigo();
StringBuilder sb = new StringBuilder();
sb.append(organization.getAssignedIdentificationId()).append("-");
sb.append(codido).append("-");
sb.append(debitNote.getID());
sb.append(".xml");
return sb.toString();
}
}
| add credit note and debit notes | services/src/main/java/org/openfact/ubl/send/pe/UblTemplateProvider_PE.java | add credit note and debit notes | <ide><path>ervices/src/main/java/org/openfact/ubl/send/pe/UblTemplateProvider_PE.java
<ide> try {
<ide> byte[] zip = generateZip(document, fileName);
<ide>
<add> File file = new File("/home/admin/ubl/"+fileName+".zip");
<add> FileOutputStream fos = new FileOutputStream(file);
<add> fos.write(zip);
<add> fos.close();
<add>
<ide> // Call Web Service Operation
<ide> BillService_Service service = new BillService_Service();
<ide> service.setHandlerResolver(new UblHeaderHandlerResolver(organization.getUblSenderConfig()));
<ide> try {
<ide> byte[] zip = generateZip(document, fileName);
<ide>
<add> File file = new File("/home/admin/ubl/"+fileName+".zip");
<add> FileOutputStream fos = new FileOutputStream(file);
<add> fos.write(zip);
<add> fos.close();
<add>
<ide> // Call Web Service Operation
<ide> BillService_Service service = new BillService_Service();
<ide> service.setHandlerResolver(new UblHeaderHandlerResolver(organization.getUblSenderConfig()));
<ide> Document document = getUblProvider(organization).getDocument(organization, debitNote);
<ide> try {
<ide> byte[] zip = generateZip(document, fileName);
<add>
<add> File file = new File("/home/admin/ubl/"+fileName+".zip");
<add> FileOutputStream fos = new FileOutputStream(file);
<add> fos.write(zip);
<add> fos.close();
<ide>
<ide> // Call Web Service Operation
<ide> BillService_Service service = new BillService_Service();
<ide> sb.append(organization.getAssignedIdentificationId()).append("-");
<ide> sb.append(codido).append("-");
<ide> sb.append(creditNote.getID());
<del> sb.append(".xml");
<ide> return sb.toString();
<ide> }
<ide>
<ide> sb.append(organization.getAssignedIdentificationId()).append("-");
<ide> sb.append(codido).append("-");
<ide> sb.append(debitNote.getID());
<del> sb.append(".xml");
<ide> return sb.toString();
<ide> }
<ide> |
|
JavaScript | mit | aa27ae5261e465b4eeb30cfd4d9e37da19940dec | 0 | darkwing/aframe,jzitelli/aframe,aframevr/aframe-core,MatthewShotton/aframe,googlecreativelab/aframe,chenzlabs/aframe,msimpson/aframe,MatthewShotton/aframe,aframevr/aframe,ngokevin/aframe,fernandojsg/aframe,aframevr/aframe-core,jzitelli/aframe,RSpace/aframe,dmarcos/aframe,msimpson/aframe,mkungla/aframe,weddingdj/aframe,mkungla/aframe,jaxmolina/ar,wizgrav/aframe,chenzlabs/aframe,jaxmolina/ar,fernandojsg/aframe,darkwing/aframe,aframevr/aframe,dmarcos/aframe,ngokevin/aframe,RSpace/aframe,weddingdj/aframe,googlecreativelab/aframe,wizgrav/aframe | var coordinateParser = require('./coordinate-parser');
var registerComponent = require('../core/register-component').registerComponent;
var utils = require('../vr-utils');
// To avoid triggering a 0 determinat
// wich makes the object3D matrix non invertible
var zeroScale = 0.000000000001;
var proto = {
defaults: {
value: {
x: 1,
y: 1,
z: 1
}
},
update: {
value: function () {
var data = this.data;
var object3D = this.el.object3D;
var x = data.x === 0 ? zeroScale : data.x;
var y = data.y === 0 ? zeroScale : data.y;
var z = data.z === 0 ? zeroScale : data.z;
object3D.scale.set(x, y, z);
}
}
};
utils.mixin(proto, coordinateParser);
module.exports.Component = registerComponent('scale', proto);
| src/components/scale.js | var coordinateParser = require('./coordinate-parser');
var registerComponent = require('../core/register-component').registerComponent;
var utils = require('../vr-utils');
var proto = {
defaults: {
value: {
x: 1,
y: 1,
z: 1
}
},
update: {
value: function () {
var data = this.data;
var object3D = this.el.object3D;
object3D.scale.set(data.x, data.y, data.z);
}
}
};
utils.mixin(proto, coordinateParser);
module.exports.Component = registerComponent('scale', proto);
| If the scale values are 0 we apply a very small number instead to avoid zeroing the matrix determinants (fixes #432)
| src/components/scale.js | If the scale values are 0 we apply a very small number instead to avoid zeroing the matrix determinants (fixes #432)
<ide> var coordinateParser = require('./coordinate-parser');
<ide> var registerComponent = require('../core/register-component').registerComponent;
<ide> var utils = require('../vr-utils');
<add>
<add>// To avoid triggering a 0 determinat
<add>// wich makes the object3D matrix non invertible
<add>var zeroScale = 0.000000000001;
<ide>
<ide> var proto = {
<ide> defaults: {
<ide> value: function () {
<ide> var data = this.data;
<ide> var object3D = this.el.object3D;
<del> object3D.scale.set(data.x, data.y, data.z);
<add> var x = data.x === 0 ? zeroScale : data.x;
<add> var y = data.y === 0 ? zeroScale : data.y;
<add> var z = data.z === 0 ? zeroScale : data.z;
<add> object3D.scale.set(x, y, z);
<ide> }
<ide> }
<ide> }; |
|
Java | apache-2.0 | 97008cc4128da5f8612af5ff78644f2391f78152 | 0 | mikesamuel/code-interlingua,mikesamuel/code-interlingua | package com.mikesamuel.cil.parser;
/**
* Receives information about serialization failures.
* <p>
* It is normal for some errors to be raised as branches fail, so a good
* rule-of-thumb is to delay reporting any until the parse as a whole is known
* to fail and then report the error with the greatest index as most likely
* indicative of the underlying cause.
*/
public interface SerialErrorReceiver {
/** Called when an attempt to serialize a particular variant fails. */
void error(SerialState state, String message);
/** Silently ignores error reports. */
public static final SerialErrorReceiver DEV_NULL = new SerialErrorReceiver() {
@Override
public void error(SerialState state, String message) {
// Drop silently.
}
};
}
| src/main/java/com/mikesamuel/cil/parser/SerialErrorReceiver.java | package com.mikesamuel.cil.parser;
/**
* Receives information about serialization failures.
* <p>
* It is normal for some errors to be raised as branches fail, so a good
* rule-of-thumb is to delay reporting any until the parse as a whole is known
* to fail and then report the error with the greatest index as most likely
* indicative of the underlying cause.
*/
public interface SerialErrorReceiver {
/** Called when an attempt to serialize a particular variant fails. */
void error(SerialState state, String message);
}
| added a do-nothing instance for use by test cases
| src/main/java/com/mikesamuel/cil/parser/SerialErrorReceiver.java | added a do-nothing instance for use by test cases | <ide><path>rc/main/java/com/mikesamuel/cil/parser/SerialErrorReceiver.java
<ide> * indicative of the underlying cause.
<ide> */
<ide> public interface SerialErrorReceiver {
<add>
<ide> /** Called when an attempt to serialize a particular variant fails. */
<ide> void error(SerialState state, String message);
<add>
<add> /** Silently ignores error reports. */
<add> public static final SerialErrorReceiver DEV_NULL = new SerialErrorReceiver() {
<add>
<add> @Override
<add> public void error(SerialState state, String message) {
<add> // Drop silently.
<add> }
<add>
<add> };
<add>
<ide> } |
|
Java | bsd-3-clause | 3eb369c12dc88c6897064dc9a5be958826c82642 | 0 | marcusthebrown/geogig,jodygarnett/GeoGig,jdgarrett/geogig,mtCarto/geogig,jdgarrett/geogig,jodygarnett/GeoGig,jdgarrett/geogig,mtCarto/geogig,marcusthebrown/geogig,jodygarnett/GeoGig,mtCarto/geogig,marcusthebrown/geogig | /* Copyright (c) 2015 Boundless.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Distribution License v1.0
* which accompanies this distribution, and is available at
* https://www.eclipse.org/org/documents/edl-v10.html
*
* Contributors:
* Gabriel Roldan (Boundless) - initial implementation
*/
package org.locationtech.geogig.cli;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintStream;
import org.fusesource.jansi.WindowsAnsiOutputStream;
/**
* Represents the console (i.e. operating system terminal) from where the program is being executed,
* allowing to read from the terminal's stdin and write to the terminal's stdout.
* <p>
* Additionally, a different pair of input and output streams can be provided explicitly to simulate
* redirection, for example for unit tests.
*
*/
public class Console {
private static final char CARRIAGE_RETURN = '\r';
private StringBuffer cursorBuffer;
@SuppressWarnings("unused")
private InputStream in;
private PrintStream out;
private boolean ansiEnabled;
private boolean ansiSupported;
/**
     * Creates a console reader that reads from {@code stdin} and writes to {@code stdout}.
*/
public Console() {
this(System.in, System.out);
}
/**
* Creates a Console that reads from and writes to the provided streams.
*
* @param in the console's input stream
* @param out the console's output stream
*/
public Console(InputStream in, OutputStream out) {
this.in = in;
this.cursorBuffer = new StringBuffer();
this.ansiEnabled = true;
this.ansiSupported = checkAnsiSupported(out);
if (out instanceof PrintStream) {
this.out = (PrintStream) out;
} else {
boolean autoFlush = true;
this.out = new PrintStream(out, autoFlush);
}
}
/**
* Returns whether writing ANSI escape sequences are supported by the console.
* <p>
* {@code true} will only be returned if the console's output is not redirected to a file or
* piped, this console's output stream is "stdout", and the JVM has been invoked from a terminal
* that supports ANSI codes.
* <p>
* If {@link #disableAnsi()} has been called, returns {@code false} immediately.
*
* @return {@code true} if ANSI terminal color codes are supported by the console, {@code false}
* otherwise.
*/
public boolean isAnsiSupported() {
return ansiEnabled && ansiSupported;
}
private static boolean checkAnsiSupported(OutputStream out) {
if (out != System.out) {
return false;
}
if (System.console() == null) {
return false;
}
final String osname = System.getProperty("os.name");
if (osname.toLowerCase().startsWith("windows")) {
try {
new WindowsAnsiOutputStream(out);
} catch (Throwable e) {
// The required Windows native lib is not available
return false;
}
}
return true;
}
/**
* Disables ANSI terminal color support, regardless of the auto-detection performed by
* {@link #isAnsiSupported()}.
*
* @return {@code this}
*/
public Console disableAnsi() {
this.ansiEnabled = false;
return this;
}
/**
* Writes the given char sequence to the cursor buffer, does not flush the buffer.
*
* @param s the character sequence to write to the console
* @throws IOException
*/
public void print(CharSequence s) throws IOException {
cursorBuffer.append(s);
}
/**
* Print a new line, flushing the cursor buffer.
*
* @throws IOException
*/
public void println() throws IOException {
println("");
}
/**
* Prints the {@code line} text to the console and starts a new line, flushing the cursor
* buffer.
*
* @param line the text to write
* @throws IOException
*/
public void println(CharSequence line) throws IOException {
print(line);
cursorBuffer.append("\n");
flush();
}
/**
* Forces flushing the cursor buffer to the console's output stream.
*
* @throws IOException
*/
public void flush() throws IOException {
String s = cursorBuffer.toString();
out.print(s);
clearBuffer();
}
/**
* Moves the console cursor to the beginning of the line (prints a carriage return {@code '\r'}
* character) and redraws the contents of the cursor buffer.
*
* @throws IOException
*/
public void redrawLine() throws IOException {
cursorBuffer.append(CARRIAGE_RETURN);
flush();
}
/**
* Clear the console's un-flushed buffer
*/
public void clearBuffer() {
this.cursorBuffer.setLength(0);
}
}
| src/cli/src/main/java/org/locationtech/geogig/cli/Console.java | /* Copyright (c) 2015 Boundless.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Distribution License v1.0
* which accompanies this distribution, and is available at
* https://www.eclipse.org/org/documents/edl-v10.html
*
* Contributors:
* Gabriel Roldan (Boundless) - initial implementation
*/
package org.locationtech.geogig.cli;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintStream;
import org.fusesource.jansi.WindowsAnsiOutputStream;
/**
* Represents the console (i.e. operating system terminal) from where the program is being executed,
* allowing to read from the terminal's stdin and write to the terminal's stdout.
* <p>
* Additionally, a different pair of input and output streams can be provided explicitly to simulate
* redirection, for example for unit tests.
*
*/
public class Console {
private static final char CARRIAGE_RETURN = '\r';
private StringBuffer cursorBuffer;
@SuppressWarnings("unused")
private InputStream in;
private OutputStream out;
private boolean ansiEnabled;
private boolean ansiSupported;
/**
     * Creates a console reader that reads from {@code stdin} and writes to {@code stdout}.
*/
public Console() {
this(System.in, System.out);
}
/**
* Creates a Console that reads from and writes to the provided streams.
*
* @param in the console's input stream
* @param out the console's output stream
*/
public Console(InputStream in, OutputStream out) {
this.in = in;
this.out = out;
this.cursorBuffer = new StringBuffer();
this.ansiEnabled = true;
this.ansiSupported = checkAnsiSupported(out);
}
/**
* Returns whether writing ANSI escape sequences are supported by the console.
* <p>
* {@code true} will only be returned if the console's output is not redirected to a file or
* piped, this console's output stream is "stdout", and the JVM has been invoked from a terminal
* that supports ANSI codes.
* <p>
* If {@link #disableAnsi()} has been called, returns {@code false} immediately.
*
* @return {@code true} if ANSI terminal color codes are supported by the console, {@code false}
* otherwise.
*/
public boolean isAnsiSupported() {
return ansiEnabled && ansiSupported;
}
private static boolean checkAnsiSupported(OutputStream out) {
if (out != System.out) {
return false;
}
if (System.console() == null) {
return false;
}
final String osname = System.getProperty("os.name");
if (osname.toLowerCase().startsWith("windows")) {
try {
new WindowsAnsiOutputStream(out);
} catch (Throwable e) {
// The required Windows native lib is not available
return false;
}
}
return true;
}
/**
* Disables ANSI terminal color support, regardless of the auto-detection performed by
* {@link #isAnsiSupported()}.
*
* @return {@code this}
*/
public Console disableAnsi() {
this.ansiEnabled = false;
return this;
}
/**
* Writes the given char sequence to the cursor buffer, does not flush the buffer.
*
* @param s the character sequence to write to the console
* @throws IOException
*/
public void print(CharSequence s) throws IOException {
cursorBuffer.append(s);
}
/**
* Print a new line, flushing the cursor buffer.
*
* @throws IOException
*/
public void println() throws IOException {
println("");
}
/**
* Prints the {@code line} text to the console and starts a new line, flushing the cursor
* buffer.
*
* @param line the text to write
* @throws IOException
*/
public void println(CharSequence line) throws IOException {
print(line);
cursorBuffer.append("\n");
flush();
}
/**
* Forces flushing the cursor buffer to the console's output stream.
*
* @throws IOException
*/
public void flush() throws IOException {
String s = cursorBuffer.toString();
PrintStream out;
if (this.out instanceof PrintStream) {
out = (PrintStream) this.out;
} else {
out = new PrintStream(this.out);
}
out.print(s);
clearBuffer();
}
/**
* Moves the console cursor to the beginning of the line (prints a carriage return {@code '\r'}
* character) and redraws the contents of the cursor buffer.
*
* @throws IOException
*/
public void redrawLine() throws IOException {
cursorBuffer.append(CARRIAGE_RETURN);
flush();
}
/**
* Clear the console's un-flushed buffer
*/
public void clearBuffer() {
this.cursorBuffer.setLength(0);
}
}
| Make CLI's Console have a PrintStream instance variable instead of creating it on flush()
| src/cli/src/main/java/org/locationtech/geogig/cli/Console.java | Make CLI's Console have a PrintStream instance variable instead of creating it on flush() | <ide><path>rc/cli/src/main/java/org/locationtech/geogig/cli/Console.java
<ide> @SuppressWarnings("unused")
<ide> private InputStream in;
<ide>
<del> private OutputStream out;
<add> private PrintStream out;
<ide>
<ide> private boolean ansiEnabled;
<ide>
<ide> */
<ide> public Console(InputStream in, OutputStream out) {
<ide> this.in = in;
<del> this.out = out;
<ide> this.cursorBuffer = new StringBuffer();
<ide> this.ansiEnabled = true;
<ide> this.ansiSupported = checkAnsiSupported(out);
<add> if (out instanceof PrintStream) {
<add> this.out = (PrintStream) out;
<add> } else {
<add> boolean autoFlush = true;
<add> this.out = new PrintStream(out, autoFlush);
<add> }
<ide> }
<ide>
<ide> /**
<ide> */
<ide> public void flush() throws IOException {
<ide> String s = cursorBuffer.toString();
<del> PrintStream out;
<del> if (this.out instanceof PrintStream) {
<del> out = (PrintStream) this.out;
<del> } else {
<del> out = new PrintStream(this.out);
<del> }
<ide> out.print(s);
<ide> clearBuffer();
<ide> } |
|
JavaScript | mit | 3009a8c50ea1a83959fb1a1e79ca278d2b86a980 | 0 | thecodebureau/ridge | views/model-controls.js | var fncs = {
cancel: function(model) {
$(this)[model.isNew() || !model.collection ? 'hide' : 'show']().prop(this.disabled, false);
},
publish: function(model) {
$(this)[!model.isNew() && !model.get('datePublished') ? 'show' : 'hide']().prop('disabled', model.isDirty());
},
unpublish: function(model) {
$(this)[!model.isNew() && model.get('datePublished') ? 'show' : 'hide']().prop('disabled', model.isDirty());
},
delete: function(model) {
this.disabled = model.isNew() || model.isDirty();
},
create: function(model) {
$(this)[model.isNew() ? 'show' : 'hide']().prop('disabled', !model.isDirty());
},
save: function(model) {
$(this)[!model.isNew() ? 'show' : 'hide']().prop('disabled', !model.isDirty());
},
reset: function(model) {
$(this).prop('disabled', !model.isDirty());
},
block: function(model) {
$(this).prop('disabled', false);
}
};
var app = require('ridge');
module.exports = require('ridge/view').extend({
events: {
'click button': 'stopPropagation',
'click button[data-command="block"]': 'block',
'click button[data-command="cancel"]': 'cancel',
'click button[data-command="create"]': 'create',
'click button[data-command="publish"]': 'publish',
'click button[data-command="reset"]': 'reset',
'click button[data-command="delete"]': 'delete',
'click button[data-command="save"]': 'save',
'click button[data-command="unpublish"]': 'unpublish'
},
initialize: function(options) {
this.setModel(options.model, false);
},
attach: function(update) {
var _view = this;
this.fncs = {};
_view.$('button').each(function() {
var command = $(this).data('command');
_view.fncs[command] = fncs[command].bind(this);
});
if(update !== false && this.model)
_view.update();
},
block: function() {
this.model.save({ isBlocked: true }, { patch: true, wait: true });
},
setModel: function(model, update) {
var _view = this;
if(_view.model)
_view.stopListening();
if(model) {
_view.model = model;
_view.listenTo(_view.model, 'change sync cancel', _view.update);
} else {
_view.model = null;
}
if(update !== false)
_view.update();
},
// Commands:
stopPropagation: function(e) {
e.stopPropagation();
},
update: function() {
var _view = this;
_.each(_view.fncs, function(fnc, name) {
fnc(_view.model);
});
},
unpublish: function(e) {
this.model.save({ datePublished: null }, { patch: true, wait: true });
},
publish: function(e) {
this.model.save({ datePublished: new Date() }, { patch: true, wait: true });
},
cancel: function() {
if(!this.model.isDirty() || confirm('Are you sure you want to cancel?'))
this.model.trigger('cancel').fetch();
},
edit: function(e) {
var _view = this;
e.preventDefault();
_view.model.trigger('edit', _view.model);
},
create: function() {
if(this.collection) {
this.collection.add(this.model);
}
this.model.save(null, {
success: function(model, response, opts) {
var path = _.initial(window.location.pathname.split('/')).join('/') + '/' + model.id;
app.router.navigate(path, { replace: true });
}
});
},
save: function() {
this.model.save();
},
reset: function() {
if(confirm('Are you sure you want to reset?')) {
this.model.reset();
}
},
});
| Removed view/model-controls.js (moved to hats/admin).
| views/model-controls.js | Removed view/model-controls.js (moved to hats/admin). | <ide><path>iews/model-controls.js
<del>var fncs = {
<del> cancel: function(model) {
<del> $(this)[model.isNew() || !model.collection ? 'hide' : 'show']().prop(this.disabled, false);
<del> },
<del> publish: function(model) {
<del> $(this)[!model.isNew() && !model.get('datePublished') ? 'show' : 'hide']().prop('disabled', model.isDirty());
<del> },
<del> unpublish: function(model) {
<del> $(this)[!model.isNew() && model.get('datePublished') ? 'show' : 'hide']().prop('disabled', model.isDirty());
<del> },
<del> delete: function(model) {
<del> this.disabled = model.isNew() || model.isDirty();
<del> },
<del> create: function(model) {
<del> $(this)[model.isNew() ? 'show' : 'hide']().prop('disabled', !model.isDirty());
<del> },
<del> save: function(model) {
<del> $(this)[!model.isNew() ? 'show' : 'hide']().prop('disabled', !model.isDirty());
<del> },
<del> reset: function(model) {
<del> $(this).prop('disabled', !model.isDirty());
<del> },
<del> block: function(model) {
<del> $(this).prop('disabled', false);
<del> }
<del>};
<del>
<del>var app = require('ridge');
<del>
<del>module.exports = require('ridge/view').extend({
<del> events: {
<del> 'click button': 'stopPropagation',
<del> 'click button[data-command="block"]': 'block',
<del> 'click button[data-command="cancel"]': 'cancel',
<del> 'click button[data-command="create"]': 'create',
<del> 'click button[data-command="publish"]': 'publish',
<del> 'click button[data-command="reset"]': 'reset',
<del> 'click button[data-command="delete"]': 'delete',
<del> 'click button[data-command="save"]': 'save',
<del> 'click button[data-command="unpublish"]': 'unpublish'
<del> },
<del>
<del> initialize: function(options) {
<del> this.setModel(options.model, false);
<del> },
<del>
<del> attach: function(update) {
<del> var _view = this;
<del>
<del> this.fncs = {};
<del>
<del> _view.$('button').each(function() {
<del> var command = $(this).data('command');
<del>
<del> _view.fncs[command] = fncs[command].bind(this);
<del> });
<del>
<del> if(update !== false && this.model)
<del> _view.update();
<del> },
<del>
<del> block: function() {
<del> this.model.save({ isBlocked: true }, { patch: true, wait: true });
<del> },
<del>
<del> setModel: function(model, update) {
<del> var _view = this;
<del>
<del> if(_view.model)
<del> _view.stopListening();
<del>
<del> if(model) {
<del> _view.model = model;
<del>
<del> _view.listenTo(_view.model, 'change sync cancel', _view.update);
<del> } else {
<del> _view.model = null;
<del> }
<del>
<del> if(update !== false)
<del> _view.update();
<del> },
<del>
<del> // Commands:
<del>
<del> stopPropagation: function(e) {
<del> e.stopPropagation();
<del> },
<del>
<del> update: function() {
<del> var _view = this;
<del>
<del> _.each(_view.fncs, function(fnc, name) {
<del> fnc(_view.model);
<del> });
<del> },
<del>
<del> unpublish: function(e) {
<del> this.model.save({ datePublished: null }, { patch: true, wait: true });
<del> },
<del>
<del> publish: function(e) {
<del> this.model.save({ datePublished: new Date() }, { patch: true, wait: true });
<del> },
<del>
<del> cancel: function() {
<del> if(!this.model.isDirty() || confirm('Are you sure you want to cancel?'))
<del> this.model.trigger('cancel').fetch();
<del> },
<del>
<del> edit: function(e) {
<del> var _view = this;
<del>
<del> e.preventDefault();
<del>
<del> _view.model.trigger('edit', _view.model);
<del> },
<del>
<del> create: function() {
<del> if(this.collection) {
<del> this.collection.add(this.model);
<del> }
<del>
<del> this.model.save(null, {
<del> success: function(model, response, opts) {
<del> var path = _.initial(window.location.pathname.split('/')).join('/') + '/' + model.id;
<del> app.router.navigate(path, { replace: true });
<del> }
<del> });
<del> },
<del>
<del> save: function() {
<del> this.model.save();
<del> },
<del>
<del> reset: function() {
<del> if(confirm('Are you sure you want to reset?')) {
<del> this.model.reset();
<del> }
<del> },
<del>}); |
||
JavaScript | mit | d6361f733a36c2495f3e7a412656e0cac9940de2 | 0 | BernhardRode/socket.io-client,socketio/socket.io-client,socketio/socket.io-client,philikon/socket.io-client,clshortfuse/socket.io-client,poqdavid/socket.io-client,lattmann/socket.io-client,toddself/socket.io-client,fhaoquan/socket.io-client,ayhansalami/socket.io-client,Creeplays/socket.io-client,goodwall/socket.io-client,dkhunt27/socket.io-client,Nibbler999/socket.io-client,AnyPresence/socket.io-client,YonasBerhe/socket.io-client,IbpTeam/node-socket.io-client,kambisports/socket.io-client,SmartestEdu/socket.io-client,wxkdesky/socket.io-client,paramburu/socket.io-client,mcanthony/socket.io-client,Alexandre-io/socket.io-client,ReikoR/socket.io-client,the1sky/socket.io-client,iFixit/socket.io-client,sanemat/socket.io-client,liyuan-rey/socket.io-client,paladox/socket.io-client,VictorQueiroz/socket.io-client,eagleeye/socket.io-client,Jonavin/socket.io-client,skerit/socket.io-client,lsm/socket.io-client,andreipetcu/socket.io-client,itadvisors/socket.io-client,nkzawa/socket.io-client,gsklee/socket.io-client,zhanglingkang/socket.io-client,watnotte/socket.io-client,marcelaraujo/socket.io-client,JohnyDays/socket.io-client,CodeKingdomsTeam/socket.io-client,ivkodenis92/socket.io-client,nus-fboa2016-si/socket.io-client,azlyth/socket.io-client,juancancela/socket.io-client,amosworker/socket.io-client,hellpf/socket.io-client,pjump/socket.io-client,enounca/socket.io-client,socketio/socket.io-client,juancarloscancela/socket.io-client,bcng/socket.io-client,Automattic/socket.io-client |
/**
* socket.io
* Copyright(c) 2011 LearnBoost <[email protected]>
* MIT Licensed
*/
(function (exports, io) {
/**
* Expose constructor.
*/
exports.SocketNamespace = SocketNamespace;
/**
* Socket namespace constructor.
*
* @constructor
* @api public
*/
function SocketNamespace (socket, name) {
this.socket = socket;
this.name = name || '';
this.flags = {};
this.json = new Flag(this, 'json');
this.ackPackets = 0;
this.acks = {};
};
/**
* Apply EventEmitter mixin.
*/
io.util.mixin(SocketNamespace, io.EventEmitter);
/**
* Copies emit since we override it
*
* @api private
*/
SocketNamespace.prototype.$emit = io.EventEmitter.prototype.emit;
/**
* Sends a packet.
*
* @api private
*/
SocketNamespace.prototype.packet = function (packet) {
packet.endpoint = this.name;
this.socket.packet(packet);
this.flags = {};
return this;
};
/**
* Sends a message
*
* @api public
*/
SocketNamespace.prototype.send = function (data, fn) {
var packet = {
type: this.flags.json ? 'json' : 'message'
, data: data
};
if ('function' == typeof fn) {
packet.id = ++this.ackPackets;
packet.ack = fn.length ? 'data' : true;
this.acks[packet.id] = fn;
}
return this.packet(packet);
};
/**
* Emits an event
*
* @api public
*/
SocketNamespace.prototype.emit = function (name) {
var args = Array.prototype.slice.call(arguments, 1)
, lastArg = args[args.length - 1]
, packet = {
type: 'event'
, name: name
};
if ('function' == typeof lastArg) {
packet.id = ++this.ackPackets;
packet.ack = lastArg.length ? 'data' : true;
this.acks[packet.id] = lastArg;
args = args.slice(0, args.length - 1);
}
packet.args = args;
return this.packet(packet);
};
/**
* Disconnects the namespace
*
* @api private
*/
SocketNamespace.prototype.disconnect = function () {
if (this.name === '') {
this.socket.disconnect();
} else {
this.packet({ type: 'disconnect' });
this.$emit('disconnect');
}
return this;
};
/**
* Handles a packet
*
* @api private
*/
SocketNamespace.prototype.onPacket = function (packet) {
var self = this;
function ack () {
self.packet({
type: 'ack'
, args: io.util.toArray(arguments)
, ackId: packet.id
});
};
switch (packet.type) {
case 'connect':
this.$emit('connect');
break;
case 'disconnect':
if (this.name === '') {
this.socket.onDisconnect(packet.reason || 'booted');
} else {
this.$emit('disconnect', packet.reason);
}
break;
case 'message':
case 'json':
var params = ['message', packet.data];
if (packet.ack == 'data') {
params.push(ack);
} else if (packet.ack) {
this.packet({ type: 'ack', ackId: packet.id });
}
this.$emit.apply(this, params);
break;
case 'event':
var params = [packet.name].concat(packet.args);
if (packet.ack == 'data')
params.push(ack);
this.$emit.apply(this, params);
break;
case 'ack':
if (this.acks[packet.ackId]) {
this.acks[packet.ackId].apply(this, packet.args);
delete this.acks[packet.ackId];
}
break;
case 'error':
if (packet.advice){
this.socket.onError(packet);
} else {
this.$emit('error', packet.reason);
}
break;
}
};
/**
* Flag interface.
*
* @api private
*/
function Flag (nsp, name) {
this.namespace = nsp;
this.name = name;
};
/**
* Send a message
*
* @api public
*/
Flag.prototype.send = function () {
this.namespace.flags[this.name] = true;
this.namespace.send.apply(this.namespace, arguments);
};
/**
* Emit an event
*
* @api public
*/
Flag.prototype.emit = function () {
this.namespace.flags[this.name] = true;
this.namespace.emit.apply(this.namespace, arguments);
};
})(
'undefined' != typeof io ? io : module.exports
, 'undefined' != typeof io ? io : module.parent.exports
);
| lib/namespace.js |
/**
* socket.io
* Copyright(c) 2011 LearnBoost <[email protected]>
* MIT Licensed
*/
(function (exports, io) {
/**
* Expose constructor.
*/
exports.SocketNamespace = SocketNamespace;
/**
* Socket namespace constructor.
*
* @constructor
* @api public
*/
function SocketNamespace (socket, name) {
this.socket = socket;
this.name = name || '';
this.flags = {};
this.json = new Flag(this, 'json');
this.ackPackets = 0;
this.acks = {};
};
/**
* Apply EventEmitter mixin.
*/
io.util.mixin(SocketNamespace, io.EventEmitter);
/**
* Copies emit since we override it
*
* @api private
*/
SocketNamespace.prototype.$emit = io.EventEmitter.prototype.emit;
/**
* Sends a packet.
*
* @api private
*/
SocketNamespace.prototype.packet = function (packet) {
packet.endpoint = this.name;
this.socket.packet(packet);
this.flags = {};
return this;
};
/**
* Sends a message
*
* @api public
*/
SocketNamespace.prototype.send = function (data, fn) {
var packet = {
type: this.flags.json ? 'json' : 'message'
, data: data
};
if ('function' == typeof fn) {
packet.id = ++this.ackPackets;
packet.ack = fn.length ? 'data' : true;
this.acks[packet.id] = fn;
}
return this.packet(packet);
};
/**
* Emits an event
*
* @api public
*/
SocketNamespace.prototype.emit = function (name) {
var args = Array.prototype.slice.call(arguments, 1)
, lastArg = args[args.length - 1]
, packet = {
type: 'event'
, name: name
};
if ('function' == typeof lastArg) {
packet.id = ++this.ackPackets;
packet.ack = lastArg.length ? 'data' : true;
this.acks[packet.id] = lastArg;
args = args.slice(0, args.length - 1);
}
packet.args = args;
return this.packet(packet);
};
/**
* Disconnects the namespace
*
* @api private
*/
SocketNamespace.prototype.disconnect = function () {
if (this.name === '') {
this.socket.disconnect();
} else {
this.packet({ type: 'disconnect' });
this.$emit('disconnect');
}
return this;
};
/**
* Handles a packet
*
* @api private
*/
SocketNamespace.prototype.onPacket = function (packet) {
var self = this;
function ack () {
self.packet({
type: 'ack'
, args: io.util.toArray(arguments)
, ackId: packet.id
});
};
switch (packet.type) {
case 'connect':
this.$emit('connect');
break;
case 'disconnect':
if (this.name === '') {
this.socket.onDisconnect(packet.reason || 'booted');
} else {
this.$emit('disconnect', packet.reason);
}
break;
case 'message':
case 'json':
var params = ['message', packet.data];
if (packet.ack == 'data') {
params.push(ack);
} else if (packet.ack) {
this.packet({ type: 'ack', ackId: packet.id });
}
this.$emit.apply(this, params);
break;
case 'event':
var params = [packet.name].concat(packet.args);
if (packet.ack == 'data')
params.push(ack);
this.$emit.apply(this, params);
break;
case 'ack':
if (this.acks[packet.ackId]) {
this.acks[packet.ackId].apply(this, packet.args);
delete this.acks[packet.ackId];
}
case 'error':
if (packet.advice){
this.socket.onError(packet);
} else {
this.$emit('error', packet.reason);
}
}
};
/**
* Flag interface.
*
* @api private
*/
function Flag (nsp, name) {
this.namespace = nsp;
this.name = name;
};
/**
* Send a message
*
* @api public
*/
Flag.prototype.send = function () {
this.namespace.flags[this.name] = true;
this.namespace.send.apply(this.namespace, arguments);
};
/**
* Emit an event
*
* @api public
*/
Flag.prototype.emit = function () {
this.namespace.flags[this.name] = true;
this.namespace.emit.apply(this.namespace, arguments);
};
})(
'undefined' != typeof io ? io : module.exports
, 'undefined' != typeof io ? io : module.parent.exports
);
| added missing breaks; the `ack` packet also emitted the onerror
| lib/namespace.js | added missing breaks; the `ack` packet also emitted the onerror | <ide><path>ib/namespace.js
<ide> this.acks[packet.ackId].apply(this, packet.args);
<ide> delete this.acks[packet.ackId];
<ide> }
<add> break;
<ide>
<ide> case 'error':
<ide> if (packet.advice){
<ide> } else {
<ide> this.$emit('error', packet.reason);
<ide> }
<add> break;
<ide> }
<ide> };
<ide> |
|
JavaScript | mit | 15c5d2a1df4482e3b381178393778c8d4ffa42f4 | 0 | akesterson/moonlight-skulk,akesterson/moonlight-skulk,akesterson/moonlight-skulk | SPEED_WALKING = 8;
SPEED_RUNNING = 14;
// Millisecond tween durations, per pixel and per 32 px tile
// (e.g. one walked tile = 9 ms/px * 32 px = 288 ms)
TWEEN_DURATION_PERPIXEL_RUNNING = 5;
TWEEN_DURATION_PERPIXEL_WALKING = 9;
TWEEN_DURATION_PERTILE_RUNNING = TWEEN_DURATION_PERPIXEL_RUNNING * 32;
TWEEN_DURATION_PERTILE_WALKING = TWEEN_DURATION_PERPIXEL_WALKING * 32;
STATE_NONE = 0;
STATE_UNAWARE = 1 << 1;
STATE_CONCERNED = 1 << 2;
STATE_ALERTED = 1 << 3;
STATE_LOSTHIM = 1 << 4;
STATE_RUNNING = 1 << 5;
STATE_FACE_LEFT = 1 << 6;
STATE_FACE_RIGHT = 1 << 7;
STATE_FACE_UP = 1 << 8;
STATE_FACE_DOWN = 1 << 9;
STATE_MOVING = 1 << 10;
STATES_AWARENESS = (STATE_UNAWARE | STATE_CONCERNED | STATE_ALERTED | STATE_LOSTHIM);
STATES_MOVEMENT = (STATE_MOVING | STATE_RUNNING);
STATES_FACE = (STATE_FACE_LEFT | STATE_FACE_RIGHT | STATE_FACE_DOWN | STATE_FACE_UP);
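// Sprite state is a single bitfield: one awareness bit, optional movement
// bits, and one facing bit are OR'd together. The STATES_* masks are used
// to clear an entire category before setting a new flag, e.g. (sketch,
// using the state helpers defined elsewhere in this file):
//   sprite.state = (sprite.state & ~STATES_AWARENESS) | STATE_ALERTED;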
SPRITE_TOWNSFOLK_MALE = 1;
SPRITE_TOWNSFOLK_FEMALE = 2;
SPRITE_TOWNSFOLK_GUARD = 3;
SPRITE_TOWNSFOLK_MALE1 = 1;
SPRITE_TOWNSFOLK_MALE2 = 2;
SPRITE_TOWNSFOLK_MALE3 = 3;
SPRITE_TOWNSFOLK_MALE4 = 4;
SPRITE_TOWNSFOLK_FEMALE1 = 5;
SPRITE_TOWNSFOLK_FEMALE2 = 6;
SPRITE_TOWNSFOLK_FEMALE3 = 7;
SPRITE_TOWNSFOLK_FEMALE4 = 8;
SPRITE_TOWNSFOLK_GUARD1 = 9;
SPRITE_TOWNSFOLK_GUARD2 = 10;
var pathfinder = null;
var pathfinder_grid = null;
var game = new Phaser.Game(640, 480, Phaser.AUTO, '');
// Create torch objects
// Light constructor
var Light = function(game, x, y, key, frame, radius, fade, color_start, color_stop, flicker, always_render, light_meter) {
    // Default arguments: 'typeof' yields a string, so it must be compared
    // against the string 'undefined'; the original comparison against the
    // undefined value was always false and clobbered every passed argument.
    color_start = (typeof color_start === 'undefined' ? 'rgba(255, 255, 255, 1.0)' : color_start);
    color_stop = (typeof color_stop === 'undefined' ? 'rgba(255, 255, 255, 0.0)' : color_stop);
    fade = (typeof fade === 'undefined' ? 0.25 : fade);
    radius = (typeof radius === 'undefined' ? 64 : radius);
    flicker = (typeof flicker === 'undefined' ? false : flicker);
    always_render = (typeof always_render === 'undefined' ? false : always_render);
    light_meter = (typeof light_meter === 'undefined' ? 1.0 : light_meter);
Phaser.Sprite.call(this, game, x, y, null);
// Set the pivot point for this sprite to the center
this.anchor.setTo(0.5, 0.5);
this.color_start = color_start;
this.color_stop = color_stop;
this.radius = radius;
this.rendered_radius = radius;
    this.fade = radius * fade;
    this.light_meter = light_meter;
    this.always_render = always_render;
    this.rect = positiveRectangle(this.x - radius, this.y - radius, radius * 2, radius * 2);
this.flicker = flicker;
};
// Lights are a type of Phaser.Sprite
Light.prototype = Object.create(Phaser.Sprite.prototype);
Light.prototype.constructor = Light;
Light.prototype.update_new_values = function() {
this.light_meter = Number(this.light_meter);
this.radius = parseInt(this.radius);
this.fade = this.radius * Number(this.fade);
this.flicker = parseBoolean(this.flicker);
this.always_render = parseBoolean(this.always_render);
this.rect = positiveRectangle(this.x - this.radius, this.y - this.radius, this.radius * 2, this.radius * 2)
}
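// Minimal usage sketch (the coordinates, colors and flicker flag below are
// illustrative assumptions, not values taken from the map data):
//   var lamp = new Light(game, 320, 480, null, null, 96, 0.25,
//                        'rgba(255, 240, 200, 1.0)', 'rgba(255, 240, 200, 0.0)',
//                        true, false, 1.0);
//   game.add.existing(lamp);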
function SoundSprite(game, x, y, key, frame,
sound_key,
sound_marker,
sound_position,
sound_volume,
sound_loop,
sound_forcerestart,
sound_distance,
sound_nofade)
{
Phaser.Sprite.call(this, game, x, y, null);
this.sound_key = sound_key;
    // Same default-argument fix as Light: compare typeof against the
    // string 'undefined' and keep the passed value when one was given.
    this.sound_marker = (typeof sound_marker === 'undefined' ? '' : sound_marker);
    this.sound_volume = (typeof sound_volume === 'undefined' ? 1.0 : sound_volume);
    this.sound_position = (typeof sound_position === 'undefined' ? 1.0 : sound_position);
    this.sound_loop = (typeof sound_loop === 'undefined' ? true : sound_loop);
    this.sound_forcerestart = (typeof sound_forcerestart === 'undefined' ? false : sound_forcerestart);
var def_distance = Math.sqrt(
Number((game.camera.width/2) * (game.camera.width/2)) +
Number((game.camera.height/2) * (game.camera.height/2))
);
    this.sound_distance = (typeof sound_distance === 'undefined' ? def_distance : sound_distance);
    this.sound_nofade = (typeof sound_nofade === 'undefined' ? false : sound_nofade);
this.sound = null;
}
SoundSprite.prototype = Object.create(Phaser.Sprite.prototype);
SoundSprite.prototype.constructor = SoundSprite;
SoundSprite.prototype.update_new_values = function() {
if ( this.sound_key == null ) {
if ( this.sound !== null ) {
this.sound.stop();
}
return;
}
this.sound_position = parseInt(this.sound_position);
this.sound_distance = Number(this.sound_distance);
this.sound_volume = Number(this.sound_volume);
this.sound_loop = parseBoolean(this.sound_loop);
this.sound_forcerestart = parseBoolean(this.sound_forcerestart);
this.sound_nofade = parseBoolean(this.sound_nofade);
if ( this.sound !== null )
this.sound.stop();
this.sound = game.add.audio(this.sound_key, this.sound_volume, this.sound_loop);
this.sound.play(
this.sound_marker,
this.sound_position,
this.sound_volume,
this.sound_loop,
this.sound_forcerestart);
}
SoundSprite.prototype.adjust_relative_to = function(spr) {
if ( this.sound_nofade == true ) {
this.sound.volume = this.sound_volume;
return;
}
    // Volume falls off linearly with the straight-line distance (the
    // hypotenuse of the triangle between the listener sprite and this
    // sound source), reaching silence at sound_distance pixels.
var xd = (spr.x - this.x);
if ( xd < 0 )
xd = -(xd);
var yd = (spr.y - this.y);
if ( yd < 0 )
yd = -(yd);
var hyp = Math.sqrt(Number(xd * xd) + Number(yd * yd));
this.sound.volume = (1.0 - Number(hyp / this.sound_distance));
// Math.max doesn't work here??
if ( this.sound.volume < 0 )
this.sound.volume = 0;
}
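// Worked example of the falloff above, with the default sound_distance
// for a 640x480 camera: def_distance = sqrt(320^2 + 240^2) = 400.
//   listener 120 px right, 160 px down -> hyp = sqrt(120^2 + 160^2) = 200
//   volume = 1.0 - 200/400 = 0.5
// A listener standing on the source gets hyp = 0, volume = 1.0; beyond
// 400 px the clamp above holds the volume at 0.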
var moonlightSettings = {
'map' : {
'tilesets': [
{
'name': 'bigtop',
'path': 'gfx/tiles/bigtop.png'
},
{
'name': '002-Woods01',
'path': 'gfx/tiles/002-Woods01.png'
},
{
'name': '009-CastleTown01',
'path': 'gfx/tiles/009-CastleTown01.png'
},
{
'name': '010-CastleTown02',
'path': 'gfx/tiles/010-CastleTown02.png'
},
{
'name': '025-Castle01',
'path': 'gfx/tiles/025-Castle01.png'
},
{
'name': '026-Castle02',
'path': 'gfx/tiles/026-Castle02.png'
},
{
'name': '027-Castle03',
'path': 'gfx/tiles/027-Castle03.png'
},
{
'name': '028-Church01',
'path': 'gfx/tiles/028-Church01.png'
},
{
'name': '029-Church02',
'path': 'gfx/tiles/029-Church02.png'
},
{
'name': '034-Bridge01',
'path': 'gfx/tiles/034-Bridge01.png'
},
{
'name': '035-Ruins01',
'path': 'gfx/tiles/035-Ruins01.png'
},
{
'name': '037-Fort01',
'path': 'gfx/tiles/037-Fort01.png'
},
{
'name': '038-Fort02',
'path': 'gfx/tiles/038-Fort02.png'
},
{
'name': '039-Tower01',
'path': 'gfx/tiles/039-Tower01.png'
},
{
'name': '040-Tower02',
'path': 'gfx/tiles/040-Tower02.png'
},
{
'name': '041-EvilCastle01',
'path': 'gfx/tiles/041-EvilCastle01.png'
},
{
'name': '042-EvilCastle02',
'path': 'gfx/tiles/042-EvilCastle02.png'
},
{
'name': '048-Sewer01',
'path': 'gfx/tiles/048-Sewer01.png'
},
{
'name': '004-Mountain01',
'path': 'gfx/tiles/004-Mountain01.png'
},
{
'name': '!Door1',
'path': 'gfx/tiles/Doors.png'
}
],
'layers': {
'0 - NonCollide Base': {
'collides': false,
'collisionBetween': [0, 0],
'type': 'tiles',
'inject_sprites': false
},
'0 - Collide Base': {
'collides': true,
'collisionBetween': [0, 9999],
'type': 'tiles',
'inject_sprites': false
},
'0 - NonCollide Overlay - Pathways': {
'collides': false,
'collisionBetween': [0, 9999],
'type': 'tiles',
'inject_sprites': false
},
'0 - NonCollide Overlay - Below Player': {
'collides': false,
'collisionBetween': [0, 9999],
'type': 'tiles',
'inject_sprites': false
},
'0 - Collide Overlay - Ground Objects': {
'collides': true,
'collisionBetween': [0, 9999],
'type': 'tiles',
'inject_sprites': true
},
'0 - NonCollide Overlay - Above Player (Short)': {
'collides': false,
'collisionBetween': [0, 9999],
'type': 'tiles',
'inject_sprites': false
},
'0 - NonCollide Overlay - Above Player (Tall)': {
'collides': false,
'collisionBetween': [0, 9999],
'type': 'tiles',
'inject_sprites': false
}
},
'path': 'gfx/map.json'
},
'sounds': [
{
'name': 'fountain',
'path': 'sfx/fountain.wav'
},
{
'name': 'fire',
'path': 'sfx/fire.ogg'
},
{
'name': 'calliope',
'path': 'sfx/calliope.mp3'
}
],
'images': [
{
'name': 'lightbox',
'path': 'gfx/ui/lightbox.png'
},
{
'name': 'lightbar',
'path': 'gfx/ui/lightbar.png'
},
{
'name': 'wordbubble',
'path': 'gfx/effects/wordbubble.png'
}
],
'spritesheets': [
{
'name': 'flame',
'path': 'gfx/effects/flame.png',
'width': 32,
'height': 32,
'frames': 96
},
{
'name': 'balloon',
'path': 'gfx/effects/Balloon.png',
'width': 32,
'height': 32,
'frames': 80
},
{
'name': 'player',
'path': 'gfx/sprites/sprite-player.png',
'width': 32,
'height': 32,
'frames': 12
},
{
'name': 'townsfolk-male-1',
'path': 'gfx/sprites/sprite-townsfolk-male-1.png',
'width': 32,
'height': 32,
'frames': 12
},
{
'name': 'townsfolk-male-2',
'path': 'gfx/sprites/sprite-townsfolk-male-2.png',
'width': 32,
'height': 32,
'frames': 12
},
{
'name': 'townsfolk-male-3',
'path': 'gfx/sprites/sprite-townsfolk-male-3.png',
'width': 32,
'height': 32,
'frames': 12
},
{
'name': 'townsfolk-male-4',
'path': 'gfx/sprites/sprite-townsfolk-male-4.png',
'width': 32,
'height': 32,
'frames': 12
},
{
'name': 'townsfolk-female-1',
'path': 'gfx/sprites/sprite-townsfolk-female-1.png',
'width': 32,
'height': 32,
'frames': 12
},
{
'name': 'townsfolk-female-2',
'path': 'gfx/sprites/sprite-townsfolk-female-2.png',
'width': 32,
'height': 32,
'frames': 12
},
{
'name': 'townsfolk-female-3',
'path': 'gfx/sprites/sprite-townsfolk-female-3.png',
'width': 32,
'height': 32,
'frames': 12
},
{
'name': 'townsfolk-female-4',
'path': 'gfx/sprites/sprite-townsfolk-female-4.png',
'width': 32,
'height': 32,
'frames': 12
},
{
'name': 'townsfolk-guard-1',
'path': 'gfx/sprites/sprite-townsfolk-guard-1.png',
'width': 32,
'height': 32,
'frames': 12
},
{
'name': 'townsfolk-guard-2',
'path': 'gfx/sprites/sprite-townsfolk-guard-2.png',
'width': 32,
'height': 32,
'frames': 12
}
],
'animations': {
'alerted': {
'frames': [0, 1, 2, 3, 4, 5, 6, 7],
'speed': 4,
'loop': false
},
'concerned': {
'frames': [8, 9, 10, 11, 12, 13, 14, 15],
'speed': 4,
'loop': false
},
'relieved': {
'frames': [40, 41, 42, 43, 44, 45, 46, 47],
'speed': 4,
'loop': false
},
'angry': {
'frames': [48, 49, 50, 51, 52, 53, 54, 55],
'speed': 4,
'loop': false
},
'bipedwalkdown': {
'frames': [1, 2, 0],
'speed': 4,
'loop': true
},
'bipedwalkleft': {
'frames': [4, 5, 3],
'speed': 4,
'loop': true
},
'bipedwalkright': {
'frames': [7, 8, 6],
'speed': 4,
'loop': true
},
'bipedwalkup': {
'frames': [10, 11, 9],
'speed': 4,
'loop': true
},
'bipedrundown': {
'frames': [1, 2, 0],
'speed': 12,
'loop': true
},
'bipedrunleft': {
'frames': [4, 5, 3],
'speed': 12,
'loop': true
},
'bipedrunright': {
'frames': [7, 8, 6],
'speed': 12,
'loop': true
},
'bipedrunup': {
'frames': [10, 11, 9],
'speed': 12,
'loop': true
},
'lantern_small': {
'frames': [24, 25, 26],
'speed': 6,
'loop': true
},
'campfire_small': {
'frames': [6, 7, 8],
'speed': 6,
'loop': true
},
'fire_small': {
'frames': [9, 10, 11],
'speed': 6,
'loop': true
}
}
};
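// moonlightSettings drives the preloader: every tileset/image/spritesheet/
// sound entry is loaded under its 'name' key, and entries in 'animations'
// are registered on sprites by the addAnimation() helper (defined
// elsewhere in this file) using the frames/speed/loop fields.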
var moonlightDialog = {
"status": {
"townsfolk-male" : {
"unaware" : [
"I'd rather be fishing.",
"Different day, same old stuff.",
"Oi! Where'd that trouble run\noff to now then?",
"The missus is off shoppin', and\nhere I am sittin' on\nme Jack Jones.",
"Oy I'm gonna have a butcher’s at\nthat new tailor's knickers\nhe has for sale.",
"I'm off to the pub to see the\nlads and chew the fat.",
"♪ ♫ Whistling ♪ ♫"
],
"concerned" : [
"Wha… what’s that? Who’s there?",
"Did you hear that?",
"Either I’m hearin’ things, or I\nneed to stop drinkin’ midday.",
"Oi? I don’t want no tomfoolery;\ncome out if you’re there!",
"Must be them darned kids again.",
"What’s that?",
"Did you see that?"
],
"alerted" : [
"Don't you come no closer, you hear?",
"Egads!",
"I'm getting’ outta here!",
"What's going on?!",
"Holy bejeezus!",
"Did you see that?",
"What're you doing?!",
"Get away!",
"Get away from me!",
"Stay away! I know Kung-fu! ... but\nthat would require bravery \nI don't have",
"Guards! GUARDS!"
],
"losthim" : [
"Whew. Glad that’s over.",
"I wasn’t scared!",
"Must’ve been intimidated by\nmy manly physique.",
"That’s right! Run away!",
"Aye, and don’t-cha come back!",
"Spoony Bard...",
"Bloody wanker!"
]
},
"townsfolk-female" : {
"unaware" : [
"My retro shake brings all the\nboys to the yard.",
"I'm off to get my Barnet sorted\nout. I’ll be the best looking\nlady at the gala.",
"It's always all itsy bitsy with\nthem boys at the Rub-a-Dub.",
"I need to get this shopping\nsorted out.",
"What a lovely evening. Perfect\nfor skulking, I would imagine."
],
"concerned" : [
"Wha… what’s that? Who’s there?",
"Did you hear that?",
"Martha? Is that you?",
"I don't want no tomfoolery.\nGo away!",
"What was that? This is how horror\ntheatre bits start…",
"What's that?",
"Did you see that?"
],
"alerted" : [
"Eeeek!",
"Stay away from me!",
"Guards! Guards!",
"What in the nine hells?",
"Get back or I'll swoon!",
"Help! He's after me virtue!"
],
"losthim" : [
"Good riddance! There’s too many\nmale protagonists in\ngames anyhow!",
"I sure am glad that’s over.",
"This town is going straight to hell.",
"I hope he doesn’t come back.",
"I hope he’s caught and hanged!"
]
},
"townsfolk-guard" : {
"unaware" : [
"Just doing my civic duty.",
"Good day, citizens.",
"Honor. Liberty. Justice.\nOh, and pancakes…\nI love pancakes.",
"No loitering.",
"I am the law.",
"May Evil beware and may\nGood dress warmly and\neat plenty of fresh vegetables.",
"We're sworn to protect The City."
],
"concerned" : [
"I sense law-breaking abound.",
"Did you hear something?",
"Did you see that?",
"I know you're around here\nsomewhere, rat…",
"Don't make me look for\nyou in hard-to-reach places!",
"The eyes play tricks\nlike tiny, round devils."
],
"alerted" : [
"Surrender lawbreaker!",
"Halt!",
"Halt! In the name of the… umm, er… me!",
"Prepare for justice, criminal!",
"I am justice!",
"There’s no escaping the law!",
"Surrender thief!",
"Prepare to taste steel!",
"Clear the area! Nobody\npanic! I'll catch him!"
],
"losthim" : [
"I’ll get you next time,\ncriminal scum.",
"Defeat is a harsh mistress.",
"Evil men may get away, but\njustice fights another day.",
"Wickedness flees, evading the\ncold steel of righteousness."
]
}
},
"conversations": {
"townsfolk-male": {
"townsfolk-female": [],
"townsfolk-male": [],
"townsfolk-guard": []
},
"townsfolk-female": {
"townsfolk-male": [],
"townsfolk-female": [],
"townsfolk-guard": [],
},
"townsfolk-guard": {
"townsfolk-male": [],
"townsfolk-female": [],
"townsfolk-guard": []
}
}
};
// Return new array with duplicate values removed
function array_unique(arr) {
var a = [];
var l = arr.length;
for(var i=0; i<l; i++) {
for(var j=i+1; j<l; j++) {
// If arr[i] is found later in the array
if (arr[i] === arr[j])
j = ++i;
}
a.push(arr[i]);
}
return a;
}
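// Example: array_unique([1, 2, 2, 3, 1]) returns [2, 3, 1]; the last
// occurrence of each value is kept, so relative order follows the final
// appearance of each element.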
function stringSize(str, font)
{
var width = 0;
var height = 0;
var f = font || '12px arial';
str.split("\n").forEach(function(x) {
var o = $('<div>' + x + '</div>')
.css({'position': 'absolute', 'float': 'left', 'visibility': 'hidden', 'font': f})
.appendTo($('body'));
if ( o.width() > width )
width = o.width();
height += 5 + o.height();
o.remove();
}, this);
return [width, height];
}
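// stringSize measures text by appending a hidden, absolutely positioned
// <div> per line (via jQuery) and reading back its rendered box; the
// extra 5 px per line approximates the inter-line padding used when the
// word bubbles are laid out below.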
var EffectSprite = function(game, x, y, key, frame, animation) {
this.update_new_values = function() {
this.animations.destroy();
this.loadTexture(this.sprite_key, 0);
addAnimation(this, this.sprite_animation);
this.animations.play(this.sprite_animation);
}
Phaser.Sprite.call(this, game, x, y, null);
game.physics.arcade.enable(this);
this.collide_with_map = true;
this.collide_with_player = false;
}
EffectSprite.prototype = Object.create(Phaser.Sprite.prototype);
EffectSprite.prototype.constructor = EffectSprite;
var AISprite = function(game, x, y, key, frame) {
this.viewRectangle = function() {
var offset = [];
var size = [];
var multiplier = 1.0;
if ( hasState(this, STATE_ALERTED) ) {
multiplier = 2.0;
}
if ( hasState(this, STATE_FACE_LEFT) ) {
offset = [0, -32 * multiplier];
size = [-this.view_distance, 96];
} else if ( hasState(this, STATE_FACE_RIGHT) ) {
offset = [32, -32 * multiplier];
size = [32 + this.view_distance, 96];
} else if ( hasState(this, STATE_FACE_DOWN) ) {
offset = [-32 * multiplier, 32];
size = [96, this.view_distance];
} else if ( hasState(this, STATE_FACE_UP) ) {
offset = [-32 * multiplier, 0];
size = [96, -this.view_distance];
} else {
console.log("I don't have a facing state?");
return null;
}
size[0] *= multiplier;
size[1] *= multiplier;
return positiveRectangle(this.x + offset[0],
this.y + offset[1],
size[0],
size[1]);
}
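  // The "view cone" is an axis-aligned rectangle reaching view_distance
  // pixels out from the facing edge and 96 px across; when ALERTED both
  // dimensions are doubled. positiveRectangle (a helper defined elsewhere
  // in this file) is assumed to normalize the negative widths/heights
  // produced by the left/up cases into a proper rectangle.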
this.canSeeSprite = function(spr, debug) {
var vd = this.view_distance;
if ( hasState(this, STATE_FACE_LEFT) ||
hasState(this, STATE_FACE_UP) ) {
// Without this the player can stand in our view distance
// but as long as their left edge is 1 px out we won't see them,
// with this we see their near edge
vd = vd + 32;
}
if ( hasState(this, STATE_ALERTED) )
vd = vd * 2;
var distance = (new Phaser.Line(spr.x, spr.y, this.x, this.y).length);
if ( distance > vd ) {
console.log("Target is outside my view distance (" + distance + " vs " + vd + ")");
return false;
}
var viewrect = this.viewRectangle();
if ( viewrect == null ) {
console.log("I don't have a view rectangle");
return false;
}
var sprrect = positiveRectangle(spr.x, spr.y, 32, 32);
if ( viewrect.intersects(sprrect) || viewrect.containsRect(sprrect) ) {
return true;
}
console.log("I have a view rectangle but it does not intersect or contain the target");
console.log(viewrect, sprrect);
return false;
}
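  // canSeeSprite is a two-phase test: a cheap radial distance check (with
  // a 32 px pad for left/up so the target's near edge counts) followed by
  // a rectangle intersection against the facing-dependent view rectangle.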
this.enableAwarenessChange = function(state) {
this.awareness_change_enabled = true;
}
this.startAwarenessTimer = function() {
this.awareness_change_enabled = false;
if ( this.awareness_timer !== null )
this.awareness_timer.stop();
this.awareness_timer = game.time.create(false);
this.awareness_timer.add(this.sprite_awareness_duration,
this.enableAwarenessChange,
this);
this.awareness_timer.start()
}
this.setAwarenessEffect = function(state) {
var animkey = "";
if ( hasState(this, state) == true ) {
// restart the awareness timer
this.startAwarenessTimer();
return;
} else if ( (state == STATE_LOSTHIM) &&
(hasState(this, STATE_ALERTED) == false) &&
(hasState(this, STATE_CONCERNED) == false) ) {
return;
}
if ( this.awareness_change_enabled == false &&
state != STATE_ALERTED ) {
return;
}
this.startAwarenessTimer();
setAwarenessState(this, state);
if ( this.awareness_effect !== null ) {
this.awareness_effect.alive = false;
this.awareness_effect.destroy();
this.awareness_effect = null;
}
if ( state == STATE_ALERTED ) {
animkey = "alerted";
} else if ( state == STATE_CONCERNED ) {
animkey = "concerned";
} else if ( state == STATE_LOSTHIM ) {
if ( this.sprite_group == "townsfolk-guard" ) {
animkey = "angry";
} else {
animkey = "relieved";
}
}
if ( animkey == "" )
return;
this.bubble_immediate = true;
this.clearWordBubble();
this.awareness_effect = game.state.states.game.add.sprite(
this.x + 16,
this.y - 16,
'balloon');
addAnimation(this.awareness_effect, animkey);
this.awareness_effect.play(animkey, null, false, true);
}
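  // Awareness transitions are debounced by awareness_timer (running for
  // sprite_awareness_duration ms) so NPCs do not flicker between states
  // every frame; ALERTED bypasses the debounce, and LOSTHIM only applies
  // after a CONCERNED or ALERTED state.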
this.enableWordBubble = function() {
this.enable_word_bubble = true;
this.timer = game.time.create(false);
if ( this.bubble_immediate == true ) {
this.bubble_immediate = false;
this.setWordBubble();
} else {
var timerdelta = 10000 + (game.rnd.integerInRange(0, 20) * 1000);
timerev = this.timer.add(timerdelta, this.setWordBubble, this);
this.timer.start()
}
}
this.clearWordBubble = function() {
if ( this.bubble_text !== null )
this.clear_bubble = true;
this.enable_word_bubble = false;
this.timer = game.time.create(false);
timerev = this.timer.add(1000, this.enableWordBubble, this);
this.timer.start()
}
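  // Bubble lifecycle: enableWordBubble schedules setWordBubble after a
  // random 10-30 s delay (or immediately after an awareness change),
  // setWordBubble displays the text for 5 s, and clearWordBubble enforces
  // a 1 s cooldown before the next bubble may be scheduled.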
this.setWordBubble = function()
{
if ( this.bubble_text !== null ||
this.sprite_group == undefined ||
         this.enable_word_bubble == false) {
return;
}
aistate = this.state & ( STATE_UNAWARE | STATE_CONCERNED | STATE_ALERTED | STATE_LOSTHIM );
switch ( aistate ) {
case STATE_UNAWARE: {
aistate = "unaware";
break;
}
case STATE_CONCERNED: {
aistate = "concerned";
break;
}
case STATE_ALERTED: {
aistate = "alerted";
break;
}
case STATE_LOSTHIM: {
aistate = "losthim";
break;
}
}
var mylines = moonlightDialog['status'][this.sprite_group][aistate];
bubbleimg = game.cache.getImage('wordbubble');
text = mylines[game.rnd.integerInRange(0, mylines.length-1)];
style = {font: '14px Arial Bold', fill: '#ffffff'}
this.text_size = stringSize(text, style['font']);
this.bubble_sprite = game.add.sprite(this.x, this.y, 'wordbubble');
this.bubble_sprite.anchor.setTo(0.5, 1.0);
this.bubble_sprite.scale.x = Number((this.text_size[0] + 16) / bubbleimg.width);
this.bubble_sprite.scale.y = Number((this.text_size[1] + 16) / bubbleimg.height);
this.bubble_text = game.add.text(this.x, this.y, text, style);
this.snap_bubble_position();
this.timer = game.time.create(false);
timerev = this.timer.add(5000, this.clearWordBubble, this);
this.timer.start()
}
this.snap_bubble_position = function()
{
this.bubble_sprite.x = this.x + 16;
this.bubble_sprite.y = this.y;
var tx = this.bubble_sprite.x - (this.text_size[0]/2);
var ty = this.bubble_sprite.y - (this.bubble_sprite.height) + 8;
this.bubble_text.position.x = tx;
this.bubble_text.position.y = ty;
}
this.blocked = function() {
function f() {
if ( hasState(this, STATE_FACE_LEFT) &&
this.body.blocked.left == true )
return true;
if ( hasState(this, STATE_FACE_RIGHT) &&
this.body.blocked.right == true )
return true;
if ( hasState(this, STATE_FACE_DOWN) &&
this.body.blocked.down == true )
return true;
if ( hasState(this, STATE_FACE_UP) &&
this.body.blocked.up == true )
return true;
return false;
}
console.log("this.blocked? " + f());
return f();
}
this.path_purge = function() {
this.path = [];
this.path_index = 0;
}
this.path_set = function(target, force) {
        force = ( typeof force == 'undefined' ? false : force );
if ( force == false &&
this.path.length > 0 &&
this.path_index < this.path_maximum_steps ) {
return false;
}
this.path_purge();
tpath = pathfinder.findPath(
parseInt(this.x/32),
parseInt(this.y/32),
parseInt(target.x/32),
parseInt(target.y/32),
pathfinder_grid.clone()
);
prevpoint = [this.x, this.y];
for ( var i = 0 ; i < tpath.length ; i++ ) {
if ( (prevpoint[0]+prevpoint[1]) == ((tpath[i][0]*32)+(tpath[i][1]*32)) )
continue;
this.path.push(new Phaser.Line(prevpoint[0], prevpoint[1],
tpath[i][0]*32, tpath[i][1]*32));
prevpoint = [tpath[i][0]*32, tpath[i][1]*32];
}
console.log("New path");
console.log(this.path);
return true;
}
this.path_tween_start = function(movingstate)
{
        movingstate = (typeof movingstate == 'undefined' ? (STATE_MOVING | STATE_RUNNING) : movingstate);
this.path_tweens = [];
prevpos = [this.x, this.y]
for ( var i = 0;
i < Math.min(this.path_maximum_steps, this.path.length) ;
i++ ) {
pl = this.path[i];
movingstate = STATE_MOVING | STATE_RUNNING;
if ( pl.end.x < prevpos[0]) {
movingstate = movingstate | STATE_FACE_LEFT;
} else if ( pl.end.x > prevpos[0] ) {
movingstate = movingstate | STATE_FACE_RIGHT;
}
if ( pl.end.y < prevpos[1] ) {
movingstate = movingstate | STATE_FACE_UP;
} else if ( pl.end.y > prevpos[1] ) {
movingstate = movingstate | STATE_FACE_DOWN;
}
prevpos = [pl.end.x, pl.end.y];
tween = game.add.tween(this);
tween.movingstate = movingstate;
this.path_tweens.push(tween);
tween.to(
{x: (pl.end.x), y: (pl.end.y)},
(TWEEN_DURATION_PERPIXEL_RUNNING * pl.length),
null);
tween.onStart.add(function() {
setMovingState(this._object, this.movingstate);
this._object.animations.play("bipedrun" + spriteFacing(this._object));
}, tween);
tween.onComplete.add(function() {
this._object.path_index += 1;
setMovingState(this._object, getFaceState(this._object));
this._object.animations.play("bipedrun" + spriteFacing(this._object));
this._object.animations.stop();
}, tween);
if ( i > 0 ) {
this.path_tweens[i-1].onComplete.add(tween.start,
tween);
}
}
console.log(this.path_tweens);
if ( this.path_tweens.length > 0 )
this.path_tweens[0].start();
}
this.path_tween_stop = function()
{
this.path_tweens.forEach(function(x) {
x.stop();
game.tweens.remove(x);
}, this);
}
this.turnUnseenDirection = function() {
if ( this.seen_directions.length >= 4 )
this.seen_directions = [];
var directions = [STATE_FACE_DOWN, STATE_FACE_LEFT,
STATE_FACE_RIGHT, STATE_FACE_UP];
var newdirection = directions[game.rnd.integerInRange(0, 3)];
while ( this.seen_directions.indexOf(newdirection) !== -1 ) {
newdirection = directions[game.rnd.integerInRange(0, 3)];
}
console.log("Setting new direction to " + newdirection);
setMovingState(this, newdirection);
this.animations.stop();
this.animations.play("bipedrun" + spriteFacing(this));
this.animations.stop();
if ( this.rotation_timer !== null ) {
this.rotation_timer.stop();
this.rotation_timer = null;
}
}
this.chasetarget = function(target, alertedState, movingstate, visual)
{
        alertedState = (typeof alertedState == 'undefined' ? STATE_ALERTED : alertedState);
        visual = (typeof visual == 'undefined' ? false : visual);
if ( game.physics.arcade.collide(this, target) )
return;
if ( this.path_index >= this.path.length ) {
this.path_tween_stop();
console.log("I am at the end of my path");
if ( (visual == false) || (this.canSeeSprite(target, false) == true )) {
console.log("I can see the target");
this.setAwarenessEffect(alertedState);
this.path_set(target, true);
this.path_tween_start(movingstate);
} else {
if ( this.rotation_timer == null ) {
console.log("I can't see the target - turning so I can");
this.rotation_timer = game.time.create(false);
timerev = this.rotation_timer.add(250, this.turnUnseenDirection, this);
this.rotation_timer.start()
}
}
} else {
if ( this.path_set(target, this.blocked(true)) == true ) {
console.log("I just got a new path");
if ( (visual == false) || (this.canSeeSprite(target, false) == false )) {
this.path_purge();
this.path_tween_stop();
} else {
this.setAwarenessEffect(alertedState);
this.path_tween_start(movingstate);
}
}
}
}
this.action_chaseplayer = function()
{
var movingstate = STATE_NONE;
this.chasetarget(player,
STATE_ALERTED,
STATE_MOVING | STATE_RUNNING,
true);
return;
}
this.action_reportplayer = function()
{
console.log("I AM REPORTING THE PLAYER");
setSpriteMovement(this);
}
this.action_huntplayer = function()
{
console.log("I AM HUNTING FOR THE PLAYER");
setSpriteMovement(this);
}
this.action_wander = function()
{
var newstate = STATE_NONE;
if ( this.sprite_canmove == false) {
return;
}
if ( game.rnd.integerInRange(0, 100) < 95 )
return;
this.turnUnseenDirection();
addState(this, STATE_MOVING);
setSpriteMovement(this);
}
this.update = function()
{
if ( this.ready_to_update == false )
return;
if ( this.awareness_effect !== null ) {
if ( this.awareness_effect.alive == false ) {
this.awareness_effect.destroy();
this.awareness_effect = null;
} else {
this.awareness_effect.x = this.x + 16;
this.awareness_effect.y = this.y - 16;
}
}
if ( this.bubble_text !== null ) {
if ( this.clear_bubble == true ) {
this.bubble_text.destroy();
this.bubble_sprite.destroy();
this.bubble_text = null;
this.bubble_sprite = null;
this.clear_bubble = false;
} else {
this.snap_bubble_position();
}
}
if ( hasState(this, STATE_ALERTED) ) {
if ( this.sprite_group == "townsfolk-guard" ) {
this.action_chaseplayer();
} else {
this.action_reportplayer();
}
} else if ( hasAnyState(this, [STATE_CONCERNED, STATE_LOSTHIM]) ) {
this.action_huntplayer();
} else {
this.action_wander();
}
}
this.update_new_values = function() {
if ( this.timer !== null )
this.timer.stop();
this.animations.destroy();
this.clearWordBubble();
this.state = STATE_UNAWARE;
this.sprite_can_see_lightmeter = Number(this.sprite_can_see_lightmeter);
this.sprite_canmove = parseBoolean(this.sprite_canmove);
this.sprite_awareness_duration = parseInt(this.sprite_awareness_duration);
this.collide_with_player = parseBoolean(this.collide_with_player);
this.collide_with_map = parseBoolean(this.collide_with_map);
this.carries_light = parseBoolean(this.carries_light);
this.path_maximum_steps = parseInt(this.path_maximum_steps);
this.loadTexture(this.sprite_name, 0);
addAnimation(this, 'bipedwalkleft');
addAnimation(this, 'bipedwalkright');
addAnimation(this, 'bipedwalkup');
addAnimation(this, 'bipedwalkdown');
addAnimation(this, 'bipedrunleft');
addAnimation(this, 'bipedrunright');
addAnimation(this, 'bipedrunup');
addAnimation(this, 'bipedrundown');
setMovingState(this, STATE_FACE_DOWN);
setSpriteMovement(this);
this.ready_to_update = true;
}
var spritenames_by_type = [
'townsfolk-male-1',
'townsfolk-male-2',
'townsfolk-male-3',
'townsfolk-male-4',
'townsfolk-female-1',
'townsfolk-female-2',
'townsfolk-female-3',
'townsfolk-female-4',
'townsfolk-guard-1',
'townsfolk-guard-2'
];
this.ready_to_update = false;
Phaser.Sprite.call(this, game, x, y, null);
game.physics.arcade.enable(this);
this.body.immovable = true;
pathfinder_grid = [];
this.walkables = [];
this.path = [];
this.path_tweens = [];
this.path_maximum_steps = 4;
this.awareness_change_enabled = true;
this.lightmeter = 1.0;
this.sprite_can_see_lightmeter = 0.3;
this.awareness_effect = null;
this.awareness_timer = null;
this.seen_directions = [];
this.sprite_awareness_duration = 60000;
this.sprite_canmove = 'true';
this.collide_with_player = 'true';
this.collide_with_map = 'true';
this.carries_light = 'false';
this.view_distance = 32 * 5;
this.timer = null;
this.rotation_timer = null;
this.origin = new Phaser.Point(x, y);
this.bubble_immediate = false;
this.bubble_text = null;
this.enable_word_bubble = false;
this.body.collideWorldBounds = true;
this.sprite_name = "townsfolk-male-1";
this.sprite_group = "townsfolk-male";
this.update_new_values();
}
AISprite.prototype = Object.create(Phaser.Sprite.prototype);
AISprite.prototype.constructor = AISprite;
function rotatePoints(arr, x, y, degrees)
{
arr.forEach(function(p) {
p.rotate(x, y, degrees, true);
}, this);
}
function positiveRectangle(x, y, w, h) {
if ( w < 0 ) {
w = -(w);
x = x - w;
}
if ( h < 0 ) {
h = -(h);
y = y - h;
}
return new Phaser.Rectangle(x, y, w, h);
}
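// A minimal usage sketch (added for illustration; the game never calls it).
// positiveRectangle() normalizes negative extents so Phaser.Rectangle always
// receives a positive width/height with a correspondingly shifted origin.
function examplePositiveRectangle() {
    // A rectangle extending 64px leftward and 32px upward from (100, 100)...
    var r = positiveRectangle(100, 100, -64, -32);
    // ...becomes the equivalent positive-extent rectangle anchored at (36, 68)
    console.log(r.x, r.y, r.width, r.height); // 36 68 64 32
}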
function addAnimation(obj, anim)
{
a = moonlightSettings['animations'][anim]
obj.animations.add(anim, a['frames'], a['speed'], a['loop'])
}
var GameState = function(game) {
}
GameState.prototype.create = function()
{
this.map = this.add.tilemap('map');
for (var k in moonlightSettings['map']['tilesets']) {
var ts = moonlightSettings['map']['tilesets'][k];
this.map.addTilesetImage(ts['name']);
}
this.map_collision_layers = [];
pfgrid = [];
for (var ln in moonlightSettings['map']['layers']) {
lp = moonlightSettings['map']['layers'][ln];
if ( lp['type'] == "tiles" ) {
layer = this.map.createLayer(ln);
this.map.setCollisionBetween(
lp['collisionBetween'][0],
lp['collisionBetween'][1],
lp['collides'],
ln
);
if ( lp['inject_sprites'] == true ) {
this.aiSprites = game.add.group();
this.aiSprites.debug = true;
this.map.createFromObjects('AI', 3544, 'player', 0, true, false, this.aiSprites, AISprite);
this.aiSprites.forEach(function(spr) {
spr.update_new_values();
}, this)
player = this.add.sprite((19 * 32), (21 * 32), 'player');
player.lightmeter = 0;
};
if ( lp['collides'] == true ) {
this.map_collision_layers.push(layer);
console.log(layer);
for (var i = 0; i < layer.layer.data.length; i++)
{
if ( i >= pfgrid.length )
pfgrid[i] = [];
for (var j = 0; j < layer.layer.data[i].length; j++)
{
if (layer.layer.data[i][j].index > 0) {
pfgrid[i][j] = 1;
} else if ( pfgrid[i][j] != 1 ) {
pfgrid[i][j] = 0;
}
}
}
}
layer.resizeWorld();
}
}
console.log(pfgrid)
pathfinder_grid = new PF.Grid(this.map.width,
this.map.height,
pfgrid);
pathfinder = new PF.AStarFinder({allowDiagonal: false});
console.log(pathfinder_grid);
console.log(pathfinder);
this.physics.arcade.enable(player);
player.body.center = new Phaser.Point(player.body.width / 2, player.body.height + player.body.halfHeight);
player.body.collideWorldBounds = true;
//player.body.immovable = true;
addAnimation(player, 'bipedwalkleft');
addAnimation(player, 'bipedwalkright');
addAnimation(player, 'bipedwalkup');
addAnimation(player, 'bipedwalkdown');
addAnimation(player, 'bipedrunleft');
addAnimation(player, 'bipedrunright');
addAnimation(player, 'bipedrunup');
addAnimation(player, 'bipedrundown');
this.camera.follow(player, Phaser.Camera.FOLLOW_TOPDOWN);
controls = game.input.keyboard.createCursorKeys();
this.effectSprites = game.add.group();
this.map.createFromObjects('EffectSprites', 5, 'player', 0, true, false, this.effectSprites, EffectSprite);
this.effectSprites.forEach(function(spr) {
spr.update_new_values();
}, this)
this.shadowTexture = game.add.bitmapData(game.world.width, game.world.height);
// drop this lower to make the map darker
this.shadowTextureColor = 'rgb(60, 60, 60)';
this.shadowSprite = game.add.image(0, 0, this.shadowTexture);
this.shadowSprite.blendMode = Phaser.blendModes.MULTIPLY;
this.staticLights = game.add.group();
this.map.createFromObjects('Lights', 97, 'player', 0, true, false, this.staticLights, Light);
this.staticLights.forEach(function(light) {
light.update_new_values();
}, this)
this.staticSounds = game.add.group();
this.map.createFromObjects('Sounds', 11, 'player', 0, true, false, this.staticSounds, SoundSprite);
this.staticSounds.forEach(function(snd) {
snd.update_new_values();
}, this)
this.bubble_group = game.add.group();
this.uigroup = game.add.group();
this.game.time.advancedTiming = true;
this.fpsText = this.game.add.text(
20, 20, '', { font: '16px Arial', fill: '#ffffff' }, this.uigroup
);
this.lightbox = this.game.add.image(game.camera.width / 2 - 50,
game.camera.height - 40,
'lightbox',
0,
this.uigroup);
this.lightbar = this.game.add.image(this.lightbox.x + 3,
this.lightbox.y + 3,
'lightbar',
0,
this.uigroup);
this.lightbar_image = game.cache.getImage('lightbar');
this.lightbar_crop = positiveRectangle(0,
0,
this.lightbar_image.width,
this.lightbar_image.height);
this.uigroup.setAll('fixedToCamera', true);
}
GameState.prototype.updateShadowTexture = function() {
this.shadowTexture.context.fillStyle = this.shadowTextureColor;
this.shadowTexture.context.fillRect(0, 0, game.world.width, game.world.height);
this.staticLights.forEach(function(light) {
if ( light.always_render !== true ) {
var r1 = positiveRectangle(this.game.camera.x,
this.game.camera.y,
this.game.camera.width,
this.game.camera.height);
if ( ! light.rect.intersects(r1) ) {
return;
}
}
if ( light.flicker ) {
var radius = light.radius + game.rnd.integerInRange(1,10);
} else {
var radius = light.radius;
}
light.rendered_radius = radius;
var gradient =
this.shadowTexture.context.createRadialGradient(
light.x + 16, light.y + 16, light.fade,
light.x + 16, light.y + 16, radius);
gradient.addColorStop(0, light.color_start);
gradient.addColorStop(1, light.color_stop);
this.shadowTexture.context.beginPath();
this.shadowTexture.context.fillStyle = gradient;
this.shadowTexture.context.arc(light.x + 16, light.y + 16, radius, 0, Math.PI*2);
this.shadowTexture.context.fill();
}, this);
this.shadowTexture.dirty = true;
};
function getFaceState(spr)
{
if ( hasState(spr, STATE_FACE_LEFT) )
return STATE_FACE_LEFT;
if ( hasState(spr, STATE_FACE_RIGHT) )
return STATE_FACE_RIGHT;
if ( hasState(spr, STATE_FACE_DOWN) )
return STATE_FACE_DOWN;
if ( hasState(spr, STATE_FACE_UP) )
return STATE_FACE_UP;
}
function getMoveState(spr)
{
return ( hasState(spr, STATE_MOVING) ||
hasState(spr, STATE_RUNNING) );
}
function delState(spr, state)
{
if ( hasState(spr, state) )
spr.state = spr.state ^ state;
}
function addState(spr, state)
{
spr.state = spr.state | state;
}
function setMovingState(spr, state)
{
delState(spr, STATE_FACE_LEFT);
delState(spr, STATE_FACE_RIGHT);
delState(spr, STATE_FACE_DOWN);
delState(spr, STATE_FACE_UP);
delState(spr, STATE_MOVING);
delState(spr, STATE_RUNNING);
addState(spr, state);
}
function setAwarenessState(spr, state)
{
delState(spr, STATE_UNAWARE);
delState(spr, STATE_CONCERNED);
delState(spr, STATE_ALERTED);
delState(spr, STATE_LOSTHIM);
addState(spr, state);
}
function exchangeState(spr, state1, state2)
{
delState(spr, state1);
addState(spr, state2);
}
function hasAnyState(spr, states)
{
var hasstate = false;
states.forEach(function(x) {
if ( hasState(spr, x) )
hasstate = true;
}, this);
return hasstate;
}
function hasState(spr, state)
{
if ( (spr.state & state) == state )
return true;
return false;
}
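// An illustrative sketch of the bitmask helpers above (added for clarity;
// nothing in the game calls it). It assumes only the STATE_* flags and the
// addState/delState/hasState helpers defined in this file.
function exampleStateFlags() {
    var spr = { state: STATE_NONE };
    addState(spr, STATE_FACE_LEFT | STATE_MOVING);
    console.log(hasState(spr, STATE_FACE_LEFT));                // true
    console.log(hasState(spr, STATE_FACE_LEFT | STATE_MOVING)); // true: every bit must match
    delState(spr, STATE_MOVING);
    console.log(hasState(spr, STATE_MOVING));                   // false
}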
function spriteFacing(spr)
{
if ( hasState(spr, STATE_FACE_LEFT) )
return "left";
if ( hasState(spr, STATE_FACE_RIGHT) )
return "right";
if ( hasState(spr, STATE_FACE_DOWN) )
return "down";
if ( hasState(spr, STATE_FACE_UP) )
return "up";
}
function parseBoolean(val)
{
return ( val == 'true' || val == true );
}
function setSpriteMovement(spr, velocity)
{
var x = 0;
var y = 0;
var dir = spriteFacing(spr);
    velocity = ( typeof velocity == 'undefined' ? [SPEED_WALKING, SPEED_RUNNING] : velocity );
spr.body.setSize(16, 16, 8, 16);
if ( hasState(spr, STATE_RUNNING) ) {
if ( velocity !== false )
velocity = velocity[1];
console.log("Playing bipedrun" + dir);
spr.animations.play("bipedrun" + dir);
} else if ( hasState(spr, STATE_MOVING) ) {
if ( velocity !== false )
velocity = velocity[0];
console.log("Playing bipedwalk" + dir);
spr.animations.play("bipedwalk" + dir);
} else {
if ( velocity !== false ) {
spr.body.velocity.x = 0;
spr.body.velocity.y = 0;
}
spr.animations.stop();
return;
}
if ( velocity !== false ) {
if ( dir == "left" ) {
spr.body.velocity.x = -(velocity * velocity);
spr.body.velocity.y = 0;
} else if ( dir == "right" ) {
spr.body.velocity.x = (velocity * velocity);
spr.body.velocity.y = 0;
} else if ( dir == "up" ) {
spr.body.velocity.x = 0;
spr.body.velocity.y = -(velocity * velocity);
} else if ( dir == "down" ) {
spr.body.velocity.x = 0;
spr.body.velocity.y = (velocity * velocity);
}
}
}
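// Note on the math above (illustration only): the configured speeds are
// applied squared, so SPEED_WALKING = 8 moves the arcade body at 64 px/s and
// SPEED_RUNNING = 14 moves it at 196 px/s.
function exampleSpeeds() {
    console.log(SPEED_WALKING * SPEED_WALKING); // 64
    console.log(SPEED_RUNNING * SPEED_RUNNING); // 196
}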
GameState.prototype.check_input = function()
{
player.body.velocity.x = 0;
player.body.velocity.y = 0;
velocityMod = 0;
var newstate = 0;
if ( controls.up.isDown) {
if ( controls.up.shiftKey ) {
newstate = (STATE_FACE_UP | STATE_MOVING | STATE_RUNNING);
} else {
newstate = (STATE_FACE_UP | STATE_MOVING );
}
} else if ( controls.down.isDown ) {
if ( controls.down.shiftKey ) {
newstate = (STATE_FACE_DOWN | STATE_MOVING | STATE_RUNNING);
} else {
newstate = (STATE_FACE_DOWN | STATE_MOVING );
}
} else if ( controls.left.isDown ) {
if ( controls.left.shiftKey ) {
newstate = (STATE_FACE_LEFT | STATE_MOVING | STATE_RUNNING);
} else {
newstate = (STATE_FACE_LEFT | STATE_MOVING );
}
} else if ( controls.right.isDown ) {
if ( controls.right.shiftKey ) {
newstate = (STATE_FACE_RIGHT | STATE_MOVING | STATE_RUNNING);
} else {
newstate = (STATE_FACE_RIGHT | STATE_MOVING );
}
} else {
newstate = STATE_NONE;
}
setMovingState(player, newstate);
setSpriteMovement(player);
}
GameState.prototype.update_player_lightmeter = function() {
lightValue = 0;
this.staticLights.forEach(function(light) {
        // Choose which player edge to measure from, relative to this light
        var left = player.x;
        var top = player.y + 32;
        if ( player.y < light.y )
            top = player.y;
        if ( player.x < light.x )
            left = player.x + 32;
line = new Phaser.Line(left, top, light.x + 16, light.y + 16);
if ( line.length > light.rendered_radius)
return;
var length = line.length;
var lv = light.light_meter - (Number(length) / Number(light.rendered_radius));
if ( lv > lightValue ) {
lightValue = lv;
}
}, this)
player.lightmeter = lightValue;
this.lightbar_crop.width = (this.lightbar_image.width * lightValue);
this.lightbar.crop(this.lightbar_crop);
}
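// A worked example of the falloff above (illustration only); it mirrors the
// formula lv = light_meter - (distance / rendered_radius).
function exampleLightFalloff() {
    var light = { light_meter: 1.0, rendered_radius: 100 };
    var distance = 25;
    var lv = light.light_meter - (Number(distance) / Number(light.rendered_radius));
    console.log(lv); // 0.75 -- full strength at the light, fading to 0 at the radius edge
}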
GameState.prototype.update = function()
{
this.check_input();
this.update_player_lightmeter();
for (var ln in this.map_collision_layers ) {
layer = this.map_collision_layers[ln];
this.physics.arcade.collide(player, layer);
}
function _fix_audio_relative(x) {
x.adjust_relative_to(player);
}
this.staticSounds.forEach(_fix_audio_relative, this);
function _inner_collide(x) {
if ( x.collide_with_map == true ) {
for ( var ln in this.map_collision_layers ) {
layer = this.map_collision_layers[ln];
this.physics.arcade.collide(x, layer);
}
}
if ( x.collide_with_player == false )
return;
if ( x.canSeeSprite(player, false) == true ) {
if ( this.physics.arcade.collide(x, player) ) {
x.setAwarenessEffect(STATE_ALERTED);
} else if ( player.lightmeter >= x.sprite_can_see_lightmeter ) {
x.setAwarenessEffect(STATE_ALERTED);
} else {
x.setAwarenessEffect(STATE_CONCERNED);
}
return;
} else {
if ( hasState(x, STATE_LOSTHIM) == false ) {
x.setAwarenessEffect(STATE_LOSTHIM);
} else {
x.setAwarenessEffect(STATE_UNAWARE);
}
}
this.physics.arcade.collide(x, player);
}
this.effectSprites.forEach(_inner_collide, this);
this.aiSprites.forEach(_inner_collide, this);
this.updateShadowTexture();
if ( this.aiSprites.debug == true ) {
function _draw_viewrect(x) {
var r = x.viewRectangle();
if ( r == null )
return;
this.shadowTexture.context.fillStyle = 'rgb(128, 128, 128)';
this.shadowTexture.context.fillRect(r.left,
r.top,
r.width,
r.height);
}
this.aiSprites.forEach(_draw_viewrect, this);
function _draw_aipath(x) {
var p = x.path;
if ( p == null )
return;
this.shadowTexture.context.fillStyle = 'rgb(255, 128, 128)';
p.forEach(function(r) {
this.shadowTexture.context.fillRect(r.start.x,
r.start.y,
r.end.x - r.start.x,
r.end.y - r.start.y);
}, this);
}
this.aiSprites.forEach(_draw_aipath, this);
}
if (game.time.fps !== 0) {
this.fpsText.setText(game.time.fps + ' FPS');
}
}
var Boot = function(game) {
}
Boot.prototype.preload = function()
{
game.load.image('preloader', 'gfx/ui/preloader.png');
};
Boot.prototype.create = function()
{
this.input.maxPointers = 1;
this.stage.disableVisibilityChange = false;
    this.stage.scale.pageAlignHorizontally = true;
game.state.start('preloader', true, false);
}
var Preloader = function(game) {
}
Preloader.prototype.preload = function()
{
this.preloadBar = game.add.sprite(0, 0, 'preloader');
this.preloadBar.anchor.setTo(0.5, 0.5);
this.preloadBar.x = game.camera.x + (game.camera.width / 2);
    this.preloadBar.y = game.camera.y + (game.camera.height / 2);
game.load.setPreloadSprite(this.preloadBar, 0);
for (var k in moonlightSettings['map']['tilesets']) {
var ts = moonlightSettings['map']['tilesets'][k];
this.load.image(ts['name'], ts['path']);
}
for (var k in moonlightSettings['images']) {
var i = moonlightSettings['images'][k];
this.load.image(i['name'], i['path']);
}
for (var k in moonlightSettings['sounds']) {
var s = moonlightSettings['sounds'][k];
this.load.audio(s['name'], s['path']);
}
for (var k in moonlightSettings['spritesheets']) {
var s = moonlightSettings['spritesheets'][k]
game.load.spritesheet(s['name'], s['path'], s['width'], s['height'], s['frames'])
}
this.load.tilemap('map',
moonlightSettings['map']['path'],
null,
Phaser.Tilemap.TILED_JSON);
}
Preloader.prototype.create = function()
{
function goalready() {
this.preloadBar.destroy();
game.state.start('game', true, false);
}
var tween = this.add.tween(this.preloadBar).to({ alpha: 0 }, 1000, Phaser.Easing.Linear.None, true);
tween.onComplete.add(goalready, this);
}
game.state.add('boot', Boot, false);
game.state.add('preloader', Preloader, false);
game.state.add('game', GameState, false);
game.state.start('boot');
| moonlight/js/moonlight-skulk.js | SPEED_WALKING = 8;
SPEED_RUNNING = 14;
// Millisecond tween durations, per pixel and per tile
TWEEN_DURATION_PERPIXEL_RUNNING = 5;
TWEEN_DURATION_PERPIXEL_WALKING = 9;
TWEEN_DURATION_PERTILE_RUNNING = TWEEN_DURATION_PERPIXEL_RUNNING * 32;
TWEEN_DURATION_PERTILE_WALKING = TWEEN_DURATION_PERPIXEL_WALKING * 32;
STATE_NONE = 0;
STATE_UNAWARE = 1 << 1;
STATE_CONCERNED = 1 << 2;
STATE_ALERTED = 1 << 3;
STATE_LOSTHIM = 1 << 4;
STATE_RUNNING = 1 << 5;
STATE_FACE_LEFT = 1 << 6;
STATE_FACE_RIGHT = 1 << 7;
STATE_FACE_UP = 1 << 8;
STATE_FACE_DOWN = 1 << 9;
STATE_MOVING = 1 << 10;
STATES_AWARENESS = (STATE_UNAWARE | STATE_CONCERNED | STATE_ALERTED | STATE_LOSTHIM);
STATES_MOVEMENT = (STATE_MOVING | STATE_RUNNING);
STATES_FACE = (STATE_FACE_LEFT | STATE_FACE_RIGHT | STATE_FACE_DOWN | STATE_FACE_UP);
SPRITE_TOWNSFOLK_MALE = 1;
SPRITE_TOWNSFOLK_FEMALE = 2;
SPRITE_TOWNSFOLK_GUARD = 3;
SPRITE_TOWNSFOLK_MALE1 = 1;
SPRITE_TOWNSFOLK_MALE2 = 2;
SPRITE_TOWNSFOLK_MALE3 = 3;
SPRITE_TOWNSFOLK_MALE4 = 4;
SPRITE_TOWNSFOLK_FEMALE1 = 5;
SPRITE_TOWNSFOLK_FEMALE2 = 6;
SPRITE_TOWNSFOLK_FEMALE3 = 7;
SPRITE_TOWNSFOLK_FEMALE4 = 8;
SPRITE_TOWNSFOLK_GUARD1 = 9;
SPRITE_TOWNSFOLK_GUARD2 = 10;
var pathfinder = null;
var pathfinder_grid = null;
var game = new Phaser.Game(640, 480, Phaser.AUTO, '');
// Create torch objects
// Light constructor
var Light = function(game, x, y, key, frame, radius, fade, color_start, color_stop, flicker, always_render, light_meter) {
    color_start = ( typeof color_start == 'undefined' ? 'rgba(255, 255, 255, 1.0)' : color_start);
    color_stop = ( typeof color_stop == 'undefined' ? 'rgba(255, 255, 255, 0.0)' : color_stop);
    fade = ( typeof fade == 'undefined' ? 0.25 : fade);
    radius = ( typeof radius == 'undefined' ? 64 : radius);
    flicker = ( typeof flicker == 'undefined' ? false : flicker);
    always_render = ( typeof always_render == 'undefined' ? false : always_render);
    light_meter = ( typeof light_meter == 'undefined' ? 1.0 : light_meter );
Phaser.Sprite.call(this, game, x, y, null);
// Set the pivot point for this sprite to the center
this.anchor.setTo(0.5, 0.5);
this.color_start = color_start;
this.color_stop = color_stop;
this.radius = radius;
this.rendered_radius = radius;
this.fade = radius * fade
this.light_meter = light_meter;
this.always_render = always_render
this.rect = positiveRectangle(this.x - radius, this.y - radius, radius * 2, radius * 2)
this.flicker = flicker;
};
// Lights are a type of Phaser.Sprite
Light.prototype = Object.create(Phaser.Sprite.prototype);
Light.prototype.constructor = Light;
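// A minimal construction sketch (illustration only; nothing in the game calls
// it). With the defaults above, omitted arguments yield a steady 64px white
// light whose fade ring starts at 25% of the radius.
function exampleLight() {
    var torch = new Light(game, 320, 240, null, 0);
    console.log(torch.radius, torch.fade, torch.flicker); // 64 16 false
}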
Light.prototype.update_new_values = function() {
this.light_meter = Number(this.light_meter);
this.radius = parseInt(this.radius);
this.fade = this.radius * Number(this.fade);
this.flicker = parseBoolean(this.flicker);
this.always_render = parseBoolean(this.always_render);
this.rect = positiveRectangle(this.x - this.radius, this.y - this.radius, this.radius * 2, this.radius * 2)
}
function SoundSprite(game, x, y, key, frame,
sound_key,
sound_marker,
sound_position,
sound_volume,
sound_loop,
sound_forcerestart,
sound_distance,
sound_nofade)
{
Phaser.Sprite.call(this, game, x, y, null);
this.sound_key = sound_key;
    this.sound_marker = ( typeof sound_marker == 'undefined' ? '' : sound_marker);
    this.sound_volume = ( typeof sound_volume == 'undefined' ? 1.0 : sound_volume );
    this.sound_position = ( typeof sound_position == 'undefined' ? 1.0 : sound_position );
    this.sound_loop = ( typeof sound_loop == 'undefined' ? true : sound_loop );
    this.sound_forcerestart = ( typeof sound_forcerestart == 'undefined' ? false : sound_forcerestart );
var def_distance = Math.sqrt(
Number((game.camera.width/2) * (game.camera.width/2)) +
Number((game.camera.height/2) * (game.camera.height/2))
);
    this.sound_distance = ( typeof sound_distance == 'undefined' ? def_distance : sound_distance);
    this.sound_nofade = (typeof sound_nofade == 'undefined' ? false : sound_nofade);
this.sound = null;
}
SoundSprite.prototype = Object.create(Phaser.Sprite.prototype);
SoundSprite.prototype.constructor = SoundSprite;
SoundSprite.prototype.update_new_values = function() {
if ( this.sound_key == null ) {
if ( this.sound !== null ) {
this.sound.stop();
}
return;
}
this.sound_position = parseInt(this.sound_position);
this.sound_distance = Number(this.sound_distance);
this.sound_volume = Number(this.sound_volume);
this.sound_loop = parseBoolean(this.sound_loop);
this.sound_forcerestart = parseBoolean(this.sound_forcerestart);
this.sound_nofade = parseBoolean(this.sound_nofade);
if ( this.sound !== null )
this.sound.stop();
this.sound = game.add.audio(this.sound_key, this.sound_volume, this.sound_loop);
this.sound.play(
this.sound_marker,
this.sound_position,
this.sound_volume,
this.sound_loop,
this.sound_forcerestart);
}
SoundSprite.prototype.adjust_relative_to = function(spr) {
if ( this.sound_nofade == true ) {
this.sound.volume = this.sound_volume;
return;
}
    // The volume of a sound falls off linearly with the length of the
    // hypotenuse drawn from the listening sprite (spr) to this sprite:
    // volume = 1 - (distance / sound_distance)
var xd = (spr.x - this.x);
if ( xd < 0 )
xd = -(xd);
var yd = (spr.y - this.y);
if ( yd < 0 )
yd = -(yd);
var hyp = Math.sqrt(Number(xd * xd) + Number(yd * yd));
this.sound.volume = (1.0 - Number(hyp / this.sound_distance));
    // Clamp to zero so sounds beyond sound_distance go silent instead of negative
if ( this.sound.volume < 0 )
this.sound.volume = 0;
}
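// A worked example of the falloff above (illustration only), assuming a
// sound_distance of 400: a listener offset by xd = 60, yd = 80 is
// sqrt(3600 + 6400) = 100px away and hears the sound at 75% volume.
function exampleSoundFalloff() {
    var hyp = Math.sqrt(Number(60 * 60) + Number(80 * 80)); // 100
    var volume = 1.0 - Number(hyp / 400);
    console.log(volume); // 0.75; anything past 400px clamps to 0 above
}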
var moonlightSettings = {
'map' : {
'tilesets': [
{
'name': 'bigtop',
'path': 'gfx/tiles/bigtop.png'
},
{
'name': '002-Woods01',
'path': 'gfx/tiles/002-Woods01.png'
},
{
'name': '009-CastleTown01',
'path': 'gfx/tiles/009-CastleTown01.png'
},
{
'name': '010-CastleTown02',
'path': 'gfx/tiles/010-CastleTown02.png'
},
{
'name': '025-Castle01',
'path': 'gfx/tiles/025-Castle01.png'
},
{
'name': '026-Castle02',
'path': 'gfx/tiles/026-Castle02.png'
},
{
'name': '027-Castle03',
'path': 'gfx/tiles/027-Castle03.png'
},
{
'name': '028-Church01',
'path': 'gfx/tiles/028-Church01.png'
},
{
'name': '029-Church02',
'path': 'gfx/tiles/029-Church02.png'
},
{
'name': '034-Bridge01',
'path': 'gfx/tiles/034-Bridge01.png'
},
{
'name': '035-Ruins01',
'path': 'gfx/tiles/035-Ruins01.png'
},
{
'name': '037-Fort01',
'path': 'gfx/tiles/037-Fort01.png'
},
{
'name': '038-Fort02',
'path': 'gfx/tiles/038-Fort02.png'
},
{
'name': '039-Tower01',
'path': 'gfx/tiles/039-Tower01.png'
},
{
'name': '040-Tower02',
'path': 'gfx/tiles/040-Tower02.png'
},
{
'name': '041-EvilCastle01',
'path': 'gfx/tiles/041-EvilCastle01.png'
},
{
'name': '042-EvilCastle02',
'path': 'gfx/tiles/042-EvilCastle02.png'
},
{
'name': '048-Sewer01',
'path': 'gfx/tiles/048-Sewer01.png'
},
{
'name': '004-Mountain01',
'path': 'gfx/tiles/004-Mountain01.png'
},
{
'name': '!Door1',
'path': 'gfx/tiles/Doors.png'
}
],
'layers': {
'0 - NonCollide Base': {
'collides': false,
'collisionBetween': [0, 0],
'type': 'tiles',
'inject_sprites': false
},
'0 - Collide Base': {
'collides': true,
'collisionBetween': [0, 9999],
'type': 'tiles',
'inject_sprites': false
},
'0 - NonCollide Overlay - Pathways': {
'collides': false,
'collisionBetween': [0, 9999],
'type': 'tiles',
'inject_sprites': false
},
'0 - NonCollide Overlay - Below Player': {
'collides': false,
'collisionBetween': [0, 9999],
'type': 'tiles',
'inject_sprites': false
},
'0 - Collide Overlay - Ground Objects': {
'collides': true,
'collisionBetween': [0, 9999],
'type': 'tiles',
'inject_sprites': true
},
'0 - NonCollide Overlay - Above Player (Short)': {
'collides': false,
'collisionBetween': [0, 9999],
'type': 'tiles',
'inject_sprites': false
},
'0 - NonCollide Overlay - Above Player (Tall)': {
'collides': false,
'collisionBetween': [0, 9999],
'type': 'tiles',
'inject_sprites': false
}
},
'path': 'gfx/map.json'
},
'sounds': [
{
'name': 'fountain',
'path': 'sfx/fountain.wav'
},
{
'name': 'fire',
'path': 'sfx/fire.ogg'
},
{
'name': 'calliope',
'path': 'sfx/calliope.mp3'
}
],
'images': [
{
'name': 'lightbox',
'path': 'gfx/ui/lightbox.png'
},
{
'name': 'lightbar',
'path': 'gfx/ui/lightbar.png'
},
{
'name': 'wordbubble',
'path': 'gfx/effects/wordbubble.png'
}
],
'spritesheets': [
{
'name': 'flame',
'path': 'gfx/effects/flame.png',
'width': 32,
'height': 32,
'frames': 96
},
{
'name': 'balloon',
'path': 'gfx/effects/Balloon.png',
'width': 32,
'height': 32,
'frames': 80
},
{
'name': 'player',
'path': 'gfx/sprites/sprite-player.png',
'width': 32,
'height': 32,
'frames': 12
},
{
'name': 'townsfolk-male-1',
'path': 'gfx/sprites/sprite-townsfolk-male-1.png',
'width': 32,
'height': 32,
'frames': 12
},
{
'name': 'townsfolk-male-2',
'path': 'gfx/sprites/sprite-townsfolk-male-2.png',
'width': 32,
'height': 32,
'frames': 12
},
{
'name': 'townsfolk-male-3',
'path': 'gfx/sprites/sprite-townsfolk-male-3.png',
'width': 32,
'height': 32,
'frames': 12
},
{
'name': 'townsfolk-male-4',
'path': 'gfx/sprites/sprite-townsfolk-male-4.png',
'width': 32,
'height': 32,
'frames': 12
},
{
'name': 'townsfolk-female-1',
'path': 'gfx/sprites/sprite-townsfolk-female-1.png',
'width': 32,
'height': 32,
'frames': 12
},
{
'name': 'townsfolk-female-2',
'path': 'gfx/sprites/sprite-townsfolk-female-2.png',
'width': 32,
'height': 32,
'frames': 12
},
{
'name': 'townsfolk-female-3',
'path': 'gfx/sprites/sprite-townsfolk-female-3.png',
'width': 32,
'height': 32,
'frames': 12
},
{
'name': 'townsfolk-female-4',
'path': 'gfx/sprites/sprite-townsfolk-female-4.png',
'width': 32,
'height': 32,
'frames': 12
},
{
'name': 'townsfolk-guard-1',
'path': 'gfx/sprites/sprite-townsfolk-guard-1.png',
'width': 32,
'height': 32,
'frames': 12
},
{
'name': 'townsfolk-guard-2',
'path': 'gfx/sprites/sprite-townsfolk-guard-2.png',
'width': 32,
'height': 32,
'frames': 12
}
],
'animations': {
'alerted': {
'frames': [0, 1, 2, 3, 4, 5, 6, 7],
'speed': 4,
'loop': false
},
'concerned': {
'frames': [8, 9, 10, 11, 12, 13, 14, 15],
'speed': 4,
'loop': false
},
'relieved': {
'frames': [40, 41, 42, 43, 44, 45, 46, 47],
'speed': 4,
'loop': false
},
'angry': {
'frames': [48, 49, 50, 51, 52, 53, 54, 55],
'speed': 4,
'loop': false
},
'bipedwalkdown': {
'frames': [1, 2, 0],
'speed': 4,
'loop': true
},
'bipedwalkleft': {
'frames': [4, 5, 3],
'speed': 4,
'loop': true
},
'bipedwalkright': {
'frames': [7, 8, 6],
'speed': 4,
'loop': true
},
'bipedwalkup': {
'frames': [10, 11, 9],
'speed': 4,
'loop': true
},
'bipedrundown': {
'frames': [1, 2, 0],
'speed': 12,
'loop': true
},
'bipedrunleft': {
'frames': [4, 5, 3],
'speed': 12,
'loop': true
},
'bipedrunright': {
'frames': [7, 8, 6],
'speed': 12,
'loop': true
},
'bipedrunup': {
'frames': [10, 11, 9],
'speed': 12,
'loop': true
},
'lantern_small': {
'frames': [24, 25, 26],
'speed': 6,
'loop': true
},
'campfire_small': {
'frames': [6, 7, 8],
'speed': 6,
'loop': true
},
'fire_small': {
'frames': [9, 10, 11],
'speed': 6,
'loop': true
}
}
};
var moonlightDialog = {
"status": {
"townsfolk-male" : {
"unaware" : [
"I'd rather be fishing.",
"Different day, same old stuff.",
"Oi! Where'd that trouble run\noff to now then?",
"The missus is off shoppin', and\nhere I am sittin' on\nme Jack Jones.",
"Oy I'm gonna have a butcher’s at\nthat new tailor's knickers\nhe has for sale.",
"I'm off to the pub to see the\nlads and chew the fat.",
"♪ ♫ Whistling ♪ ♫"
],
"concerned" : [
"Wha… what’s that? Who’s there?",
"Did you hear that?",
"Either I’m hearin’ things, or I\nneed to stop drinkin’ midday.",
"Oi? I don’t want no tomfoolery;\ncome out if you’re there!",
"Must be them darned kids again.",
"What’s that?",
"Did you see that?"
],
"alerted" : [
"Don't you come no closer, you hear?",
"Egads!",
"I'm getting’ outta here!",
"What's going on?!",
"Holy bejeezus!",
"Did you see that?",
"What're you doing?!",
"Get away!",
"Get away from me!",
"Stay away! I know Kung-fu! ... but\nthat would require bravery \nI don't have",
"Guards! GUARDS!"
],
"losthim" : [
"Whew. Glad that’s over.",
"I wasn’t scared!",
"Must’ve been intimidated by\nmy manly physique.",
"That’s right! Run away!",
"Aye, and don’t-cha come back!",
"Spoony Bard...",
"Bloody wanker!"
]
},
"townsfolk-female" : {
"unaware" : [
"My retro shake brings all the\nboys to the yard.",
"I'm off to get my Barnet sorted\nout. I’ll be the best looking\nlady at the gala.",
"It's always all itsy bitsy with\nthem boys at the Rub-a-Dub.",
"I need to get this shopping\nsorted out.",
"What a lovely evening. Perfect\nfor skulking, I would imagine."
],
"concerned" : [
"Wha… what’s that? Who’s there?",
"Did you hear that?",
"Martha? Is that you?",
"I don't want no tomfoolery.\nGo away!",
"What was that? This is how horror\ntheatre bits start…",
"What's that?",
"Did you see that?"
],
"alerted" : [
"Eeeek!",
"Stay away from me!",
"Guards! Guards!",
"What in the nine hells?",
"Get back or I'll swoon!",
"Help! He's after me virtue!"
],
"losthim" : [
"Good riddance! There’s too many\nmale protagonists in\ngames anyhow!",
"I sure am glad that’s over.",
"This town is going straight to hell.",
"I hope he doesn’t come back.",
"I hope he’s caught and hanged!"
]
},
"townsfolk-guard" : {
"unaware" : [
"Just doing my civic duty.",
"Good day, citizens.",
"Honor. Liberty. Justice.\nOh, and pancakes…\nI love pancakes.",
"No loitering.",
"I am the law.",
"May Evil beware and may\nGood dress warmly and\neat plenty of fresh vegetables.",
"We're sworn to protect The City."
],
"concerned" : [
"I sense law-breaking abound.",
"Did you hear something?",
"Did you see that?",
"I know you're around here\nsomewhere, rat…",
"Don't make me look for\nyou in hard-to-reach places!",
"The eyes play tricks\nlike tiny, round devils."
],
"alerted" : [
"Surrender lawbreaker!",
"Halt!",
"Halt! In the name of the… umm, er… me!",
"Prepare for justice, criminal!",
"I am justice!",
"There’s no escaping the law!",
"Surrender thief!",
"Prepare to taste steel!",
"Clear the area! Nobody\npanic! I'll catch him!"
],
"losthim" : [
"I’ll get you next time,\ncriminal scum.",
"Defeat is a harsh mistress.",
"Evil men may get away, but\njustice fights another day.",
"Wickedness flees, evading the\ncold steel of righteousness."
]
}
},
"conversations": {
"townsfolk-male": {
"townsfolk-female": [],
"townsfolk-male": [],
"townsfolk-guard": []
},
"townsfolk-female": {
"townsfolk-male": [],
"townsfolk-female": [],
"townsfolk-guard": [],
},
"townsfolk-guard": {
"townsfolk-male": [],
"townsfolk-female": [],
"townsfolk-guard": []
}
}
};
// Return new array with duplicate values removed
function array_unique(arr) {
var a = [];
var l = arr.length;
for(var i=0; i<l; i++) {
for(var j=i+1; j<l; j++) {
            // If arr[i] appears again later, skip ahead so only the last occurrence is kept
if (arr[i] === arr[j])
j = ++i;
}
a.push(arr[i]);
}
return a;
}
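// A usage sketch (illustration only): note that the skip-ahead trick keeps the
// LAST occurrence of each duplicated value, not the first.
function exampleArrayUnique() {
    console.log(array_unique([1, 2, 2, 3, 1])); // [2, 3, 1]
}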
function stringSize(str, font)
{
var width = 0;
var height = 0;
var f = font || '12px arial';
str.split("\n").forEach(function(x) {
var o = $('<div>' + x + '</div>')
.css({'position': 'absolute', 'float': 'left', 'visibility': 'hidden', 'font': f})
.appendTo($('body'));
if ( o.width() > width )
width = o.width();
height += 5 + o.height();
o.remove();
}, this);
return [width, height];
}
var EffectSprite = function(game, x, y, key, frame, animation) {
this.update_new_values = function() {
this.animations.destroy();
this.loadTexture(this.sprite_key, 0);
addAnimation(this, this.sprite_animation);
this.animations.play(this.sprite_animation);
}
Phaser.Sprite.call(this, game, x, y, null);
game.physics.arcade.enable(this);
this.collide_with_map = true;
this.collide_with_player = false;
}
EffectSprite.prototype = Object.create(Phaser.Sprite.prototype);
EffectSprite.prototype.constructor = EffectSprite;
var AISprite = function(game, x, y, key, frame) {
this.viewRectangle = function() {
var offset = [];
var size = [];
var multiplier = 1.0;
if ( hasState(this, STATE_ALERTED) ) {
multiplier = 2.0;
}
if ( hasState(this, STATE_FACE_LEFT) ) {
offset = [0, -32 * multiplier];
size = [-this.view_distance, 96];
} else if ( hasState(this, STATE_FACE_RIGHT) ) {
offset = [32, -32 * multiplier];
size = [32 + this.view_distance, 96];
} else if ( hasState(this, STATE_FACE_DOWN) ) {
offset = [-32 * multiplier, 32];
size = [96, this.view_distance];
} else if ( hasState(this, STATE_FACE_UP) ) {
offset = [-32 * multiplier, 0];
size = [96, -this.view_distance];
} else {
console.log("I don't have a facing state?");
return null;
}
size[0] *= multiplier;
size[1] *= multiplier;
return positiveRectangle(this.x + offset[0],
this.y + offset[1],
size[0],
size[1]);
}
this.canSeeSprite = function(spr, debug) {
var vd = this.view_distance;
if ( hasState(this, STATE_FACE_LEFT) ||
hasState(this, STATE_FACE_UP) ) {
// Without this the player can stand in our view distance
            // but as long as their left edge is 1px outside it we won't see them;
            // padding by a tile lets us see their near edge
vd = vd + 32;
}
if ( hasState(this, STATE_ALERTED) )
vd = vd * 2;
var distance = (new Phaser.Line(spr.x, spr.y, this.x, this.y).length);
if ( distance > vd ) {
console.log("Target is outside my view distance (" + distance + " vs " + vd + ")");
return false;
}
var viewrect = this.viewRectangle();
if ( viewrect == null ) {
console.log("I don't have a view rectangle");
return false;
}
var sprrect = positiveRectangle(spr.x, spr.y, 32, 32);
if ( viewrect.intersects(sprrect) || viewrect.containsRect(sprrect) ) {
return true;
}
console.log("I have a view rectangle but it does not intersect or contain the target");
console.log(viewrect, sprrect);
return false;
}
this.enableAwarenessChange = function(state) {
this.awareness_change_enabled = true;
}
this.startAwarenessTimer = function() {
this.awareness_change_enabled = false;
if ( this.awareness_timer !== null )
this.awareness_timer.stop();
this.awareness_timer = game.time.create(false);
this.awareness_timer.add(this.sprite_awareness_duration,
this.enableAwarenessChange,
this);
this.awareness_timer.start()
}
this.setAwarenessEffect = function(state) {
var animkey = "";
if ( hasState(this, state) == true ) {
// restart the awareness timer
this.startAwarenessTimer();
return;
} else if ( (state == STATE_LOSTHIM) &&
(hasState(this, STATE_ALERTED) == false) &&
(hasState(this, STATE_CONCERNED) == false) ) {
return;
}
if ( this.awareness_change_enabled == false &&
state != STATE_ALERTED ) {
return;
}
this.startAwarenessTimer();
setAwarenessState(this, state);
if ( this.awareness_effect !== null ) {
this.awareness_effect.alive = false;
this.awareness_effect.destroy();
this.awareness_effect = null;
}
if ( state == STATE_ALERTED ) {
animkey = "alerted";
} else if ( state == STATE_CONCERNED ) {
animkey = "concerned";
} else if ( state == STATE_LOSTHIM ) {
if ( this.sprite_group == "townsfolk-guard" ) {
animkey = "angry";
} else {
animkey = "relieved";
}
}
if ( animkey == "" )
return;
this.bubble_immediate = true;
this.clearWordBubble();
this.awareness_effect = game.state.states.game.add.sprite(
this.x + 16,
this.y - 16,
'balloon');
addAnimation(this.awareness_effect, animkey);
this.awareness_effect.play(animkey, null, false, true);
}
this.enableWordBubble = function() {
this.enable_word_bubble = true;
this.timer = game.time.create(false);
if ( this.bubble_immediate == true ) {
this.bubble_immediate = false;
this.setWordBubble();
} else {
var timerdelta = 10000 + (game.rnd.integerInRange(0, 20) * 1000);
timerev = this.timer.add(timerdelta, this.setWordBubble, this);
this.timer.start()
}
}
this.clearWordBubble = function() {
if ( this.bubble_text !== null )
this.clear_bubble = true;
this.enable_word_bubble = false;
this.timer = game.time.create(false);
timerev = this.timer.add(1000, this.enableWordBubble, this);
this.timer.start()
}
this.setWordBubble = function()
{
if ( this.bubble_text !== null ||
this.sprite_group == undefined ||
             this.enable_word_bubble == false) {
return;
}
aistate = this.state & ( STATE_UNAWARE | STATE_CONCERNED | STATE_ALERTED | STATE_LOSTHIM );
switch ( aistate ) {
case STATE_UNAWARE: {
aistate = "unaware";
break;
}
case STATE_CONCERNED: {
aistate = "concerned";
break;
}
case STATE_ALERTED: {
aistate = "alerted";
break;
}
case STATE_LOSTHIM: {
aistate = "losthim";
break;
}
}
var mylines = moonlightDialog['status'][this.sprite_group][aistate];
bubbleimg = game.cache.getImage('wordbubble');
text = mylines[game.rnd.integerInRange(0, mylines.length-1)];
style = {font: '14px Arial Bold', fill: '#ffffff'}
this.text_size = stringSize(text, style['font']);
this.bubble_sprite = game.add.sprite(this.x, this.y, 'wordbubble');
this.bubble_sprite.anchor.setTo(0.5, 1.0);
this.bubble_sprite.scale.x = Number((this.text_size[0] + 16) / bubbleimg.width);
this.bubble_sprite.scale.y = Number((this.text_size[1] + 16) / bubbleimg.height);
this.bubble_text = game.add.text(this.x, this.y, text, style);
this.snap_bubble_position();
this.timer = game.time.create(false);
timerev = this.timer.add(5000, this.clearWordBubble, this);
this.timer.start()
}
this.snap_bubble_position = function()
{
this.bubble_sprite.x = this.x + 16;
this.bubble_sprite.y = this.y;
var tx = this.bubble_sprite.x - (this.text_size[0]/2);
var ty = this.bubble_sprite.y - (this.bubble_sprite.height) + 8;
this.bubble_text.position.x = tx;
this.bubble_text.position.y = ty;
}
this.blocked = function() {
function f() {
if ( hasState(this, STATE_FACE_LEFT) &&
this.body.blocked.left == true )
return true;
if ( hasState(this, STATE_FACE_RIGHT) &&
this.body.blocked.right == true )
return true;
if ( hasState(this, STATE_FACE_DOWN) &&
this.body.blocked.down == true )
return true;
if ( hasState(this, STATE_FACE_UP) &&
this.body.blocked.up == true )
return true;
return false;
}
console.log("this.blocked? " + f());
return f();
}
this.path_purge = function() {
this.path = [];
this.path_index = 0;
}
this.path_set = function(target, force) {
        force = ( typeof force == 'undefined' ? false : force );
if ( force == false &&
this.path.length > 0 &&
this.path_index < this.path_maximum_steps ) {
return false;
}
this.path_purge();
tpath = pathfinder.findPath(
parseInt(this.x/32),
parseInt(this.y/32),
parseInt(target.x/32),
parseInt(target.y/32),
pathfinder_grid.clone()
);
prevpoint = [this.x, this.y];
for ( var i = 0 ; i < tpath.length ; i++ ) {
if ( (prevpoint[0]+prevpoint[1]) == ((tpath[i][0]*32)+(tpath[i][1]*32)) )
continue;
this.path.push(new Phaser.Line(prevpoint[0], prevpoint[1],
tpath[i][0]*32, tpath[i][1]*32));
prevpoint = [tpath[i][0]*32, tpath[i][1]*32];
}
console.log("New path");
console.log(this.path);
return true;
}
this.path_tween_start = function(movingstate)
{
        movingstate = (typeof movingstate == 'undefined' ? (STATE_MOVING | STATE_RUNNING) : movingstate);
this.path_tweens = [];
prevpos = [this.x, this.y]
for ( var i = 0;
i < Math.min(this.path_maximum_steps, this.path.length) ;
i++ ) {
pl = this.path[i];
movingstate = STATE_MOVING | STATE_RUNNING;
if ( pl.end.x < prevpos[0]) {
movingstate = movingstate | STATE_FACE_LEFT;
} else if ( pl.end.x > prevpos[0] ) {
movingstate = movingstate | STATE_FACE_RIGHT;
}
if ( pl.end.y < prevpos[1] ) {
movingstate = movingstate | STATE_FACE_UP;
} else if ( pl.end.y > prevpos[1] ) {
movingstate = movingstate | STATE_FACE_DOWN;
}
prevpos = [pl.end.x, pl.end.y];
tween = game.add.tween(this);
tween.movingstate = movingstate;
this.path_tweens.push(tween);
tween.to(
{x: (pl.end.x), y: (pl.end.y)},
(TWEEN_DURATION_PERPIXEL_RUNNING * pl.length),
null);
tween.onStart.add(function() {
setMovingState(this._object, this.movingstate);
this._object.animations.play("bipedrun" + spriteFacing(this._object));
}, tween);
tween.onComplete.add(function() {
this._object.path_index += 1;
setMovingState(this._object, getFaceState(this._object));
this._object.animations.play("bipedrun" + spriteFacing(this._object));
this._object.animations.stop();
}, tween);
if ( i > 0 ) {
this.path_tweens[i-1].onComplete.add(tween.start,
tween);
}
}
console.log(this.path_tweens);
if ( this.path_tweens.length > 0 )
this.path_tweens[0].start();
}
this.path_tween_stop = function()
{
this.path_tweens.forEach(function(x) {
x.stop();
game.tweens.remove(x);
}, this);
}
this.turnUnseenDirection = function() {
if ( this.seen_directions.length >= 4 )
this.seen_directions = [];
var directions = [STATE_FACE_DOWN, STATE_FACE_LEFT,
STATE_FACE_RIGHT, STATE_FACE_UP];
var newdirection = directions[game.rnd.integerInRange(0, 3)];
while ( this.seen_directions.indexOf(newdirection) !== -1 ) {
newdirection = directions[game.rnd.integerInRange(0, 3)];
}
console.log("Setting new direction to " + newdirection);
setMovingState(this, newdirection);
this.animations.stop();
this.animations.play("bipedrun" + spriteFacing(this));
this.animations.stop();
if ( this.rotation_timer !== null ) {
this.rotation_timer.stop();
this.rotation_timer = null;
}
}
this.chasetarget = function(target, alertedState, movingstate, visual)
{
        alertedState = (typeof alertedState == 'undefined' ? STATE_ALERTED : alertedState);
        visual = (typeof visual == 'undefined' ? false : visual);
if ( game.physics.arcade.collide(this, target) )
return;
if ( this.path_index >= this.path.length ) {
this.path_tween_stop();
console.log("I am at the end of my path");
if ( (visual == false) || (this.canSeeSprite(target, false) == true )) {
console.log("I can see the target");
this.setAwarenessEffect(alertedState);
this.path_set(target, true);
this.path_tween_start(movingstate);
} else {
if ( this.rotation_timer == null ) {
console.log("I can't see the target - turning so I can");
this.rotation_timer = game.time.create(false);
timerev = this.rotation_timer.add(250, this.turnUnseenDirection, this);
this.rotation_timer.start()
}
}
} else {
if ( this.path_set(target, this.blocked(true)) == true ) {
console.log("I just got a new path");
if ( (visual == false) || (this.canSeeSprite(target, false) == false )) {
this.path_purge();
this.path_tween_stop();
} else {
this.setAwarenessEffect(alertedState);
this.path_tween_start(movingstate);
}
}
}
}
this.action_chaseplayer = function()
{
var movingstate = STATE_NONE;
this.action_chasetarget(player,
STATE_ALERTED,
STATE_MOVING | STATE_RUNNING,
true);
return;
}
this.action_reportplayer = function()
{
console.log("I AM REPORTING THE PLAYER");
setSpriteMovement(this);
}
this.action_huntplayer = function()
{
console.log("I AM HUNTING FOR THE PLAYER");
setSpriteMovement(this);
}
this.action_wander = function()
{
var newstate = STATE_NONE;
if ( this.sprite_canmove == false) {
return;
}
if ( game.rnd.integerInRange(0, 100) < 95 )
return;
this.turnUnseenDirection();
addState(this, STATE_MOVING);
setSpriteMovement(this);
}
this.update = function()
{
if ( this.ready_to_update == false )
return;
if ( this.awareness_effect !== null ) {
if ( this.awareness_effect.alive == false ) {
this.awareness_effect.destroy();
this.awareness_effect = null;
} else {
this.awareness_effect.x = this.x + 16;
this.awareness_effect.y = this.y - 16;
}
}
if ( this.bubble_text !== null ) {
if ( this.clear_bubble == true ) {
this.bubble_text.destroy();
this.bubble_sprite.destroy();
this.bubble_text = null;
this.bubble_sprite = null;
this.clear_bubble = false;
} else {
this.snap_bubble_position();
}
}
if ( hasState(this, STATE_ALERTED) ) {
if ( this.sprite_group == "townsfolk-guard" ) {
this.action_chaseplayer();
} else {
this.action_reportplayer();
}
} else if ( hasAnyState(this, [STATE_CONCERNED, STATE_LOSTHIM]) ) {
this.action_huntplayer();
} else {
this.action_wander();
}
}
this.update_new_values = function() {
if ( this.timer !== null )
this.timer.stop();
this.animations.destroy();
this.clearWordBubble();
this.state = STATE_UNAWARE;
this.sprite_can_see_lightmeter = Number(this.sprite_can_see_lightmeter);
this.sprite_canmove = parseBoolean(this.sprite_canmove);
this.sprite_awareness_duration = parseInt(this.sprite_awareness_duration);
this.collide_with_player = parseBoolean(this.collide_with_player);
this.collide_with_map = parseBoolean(this.collide_with_map);
this.carries_light = parseBoolean(this.carries_light);
this.path_maximum_steps = parseInt(this.path_maximum_steps);
this.loadTexture(this.sprite_name, 0);
addAnimation(this, 'bipedwalkleft');
addAnimation(this, 'bipedwalkright');
addAnimation(this, 'bipedwalkup');
addAnimation(this, 'bipedwalkdown');
addAnimation(this, 'bipedrunleft');
addAnimation(this, 'bipedrunright');
addAnimation(this, 'bipedrunup');
addAnimation(this, 'bipedrundown');
setMovingState(this, STATE_FACE_DOWN);
setSpriteMovement(this);
this.ready_to_update = true;
}
var spritenames_by_type = [
'townsfolk-male-1',
'townsfolk-male-2',
'townsfolk-male-3',
'townsfolk-male-4',
'townsfolk-female-1',
'townsfolk-female-2',
'townsfolk-female-3',
'townsfolk-female-4',
'townsfolk-guard-1',
'townsfolk-guard-2'
];
this.ready_to_update = false;
Phaser.Sprite.call(this, game, x, y, null);
game.physics.arcade.enable(this);
this.body.immovable = true;
pathfinder_grid = [];
this.walkables = [];
this.path = [];
this.path_tweens = [];
this.path_maximum_steps = 4;
this.awareness_change_enabled = true;
this.lightmeter = 1.0;
this.sprite_can_see_lightmeter = 0.3;
this.awareness_effect = null;
this.awareness_timer = null;
this.seen_directions = [];
this.sprite_awareness_duration = 60000;
this.sprite_canmove = 'true';
this.collide_with_player = 'true';
this.collide_with_map = 'true';
this.carries_light = 'false';
this.view_distance = 32 * 5;
this.timer = null;
this.rotation_timer = null;
this.origin = new Phaser.Point(x, y);
this.bubble_immediate = false;
this.bubble_text = null;
this.enable_word_bubble = false;
this.body.collideWorldBounds = true;
this.sprite_name = "townsfolk-male-1";
this.sprite_group = "townsfolk-male";
this.update_new_values();
}
AISprite.prototype = Object.create(Phaser.Sprite.prototype);
AISprite.prototype.constructor = AISprite;
function rotatePoints(arr, x, y, degrees)
{
arr.forEach(function(p) {
p.rotate(x, y, degrees, true);
}, this);
}
function positiveRectangle(x, y, w, h) {
if ( w < 0 ) {
w = -(w);
x = x - w;
}
if ( h < 0 ) {
h = -(h);
y = y - h;
}
return new Phaser.Rectangle(x, y, w, h);
}
function addAnimation(obj, anim)
{
a = moonlightSettings['animations'][anim]
obj.animations.add(anim, a['frames'], a['speed'], a['loop'])
}
var GameState = function(game) {
}
GameState.prototype.create = function()
{
this.map = this.add.tilemap('map');
for (var k in moonlightSettings['map']['tilesets']) {
var ts = moonlightSettings['map']['tilesets'][k];
this.map.addTilesetImage(ts['name']);
}
this.map_collision_layers = [];
pfgrid = [];
for (var ln in moonlightSettings['map']['layers']) {
lp = moonlightSettings['map']['layers'][ln];
if ( lp['type'] == "tiles" ) {
layer = this.map.createLayer(ln);
this.map.setCollisionBetween(
lp['collisionBetween'][0],
lp['collisionBetween'][1],
lp['collides'],
ln
);
if ( lp['inject_sprites'] == true ) {
this.aiSprites = game.add.group();
this.aiSprites.debug = true;
this.map.createFromObjects('AI', 3544, 'player', 0, true, false, this.aiSprites, AISprite);
this.aiSprites.forEach(function(spr) {
spr.update_new_values();
}, this)
player = this.add.sprite((19 * 32), (21 * 32), 'player');
player.lightmeter = 0;
};
if ( lp['collides'] == true ) {
this.map_collision_layers.push(layer);
console.log(layer);
for (var i = 0; i < layer.layer.data.length; i++)
{
if ( i >= pfgrid.length )
pfgrid[i] = [];
for (var j = 0; j < layer.layer.data[i].length; j++)
{
if (layer.layer.data[i][j].index > 0) {
pfgrid[i][j] = 1;
} else if ( pfgrid[i][j] != 1 ) {
pfgrid[i][j] = 0;
}
}
}
}
layer.resizeWorld();
}
}
console.log(pfgrid)
pathfinder_grid = new PF.Grid(this.map.width,
this.map.height,
pfgrid);
pathfinder = new PF.AStarFinder({allowDiagonal: false});
console.log(pathfinder_grid);
console.log(pathfinder);
this.physics.arcade.enable(player);
player.body.center = new Phaser.Point(player.body.width / 2, player.body.height + player.body.halfHeight);
player.body.collideWorldBounds = true;
//player.body.immovable = true;
addAnimation(player, 'bipedwalkleft');
addAnimation(player, 'bipedwalkright');
addAnimation(player, 'bipedwalkup');
addAnimation(player, 'bipedwalkdown');
addAnimation(player, 'bipedrunleft');
addAnimation(player, 'bipedrunright');
addAnimation(player, 'bipedrunup');
addAnimation(player, 'bipedrundown');
this.camera.follow(player, Phaser.Camera.FOLLOW_TOPDOWN);
controls = game.input.keyboard.createCursorKeys();
this.effectSprites = game.add.group();
this.map.createFromObjects('EffectSprites', 5, 'player', 0, true, false, this.effectSprites, EffectSprite);
this.effectSprites.forEach(function(spr) {
spr.update_new_values();
}, this)
this.shadowTexture = game.add.bitmapData(game.world.width, game.world.height);
// drop this lower to make the map darker
this.shadowTextureColor = 'rgb(60, 60, 60)';
this.shadowSprite = game.add.image(0, 0, this.shadowTexture);
this.shadowSprite.blendMode = Phaser.blendModes.MULTIPLY;
this.staticLights = game.add.group();
this.map.createFromObjects('Lights', 97, 'player', 0, true, false, this.staticLights, Light);
this.staticLights.forEach(function(light) {
light.update_new_values();
}, this)
this.staticSounds = game.add.group();
this.map.createFromObjects('Sounds', 11, 'player', 0, true, false, this.staticSounds, SoundSprite);
this.staticSounds.forEach(function(snd) {
snd.update_new_values();
}, this)
this.bubble_group = game.add.group();
this.uigroup = game.add.group();
this.game.time.advancedTiming = true;
this.fpsText = this.game.add.text(
20, 20, '', { font: '16px Arial', fill: '#ffffff' }, this.uigroup
);
this.lightbox = this.game.add.image(game.camera.width / 2 - 50,
game.camera.height - 40,
'lightbox',
0,
this.uigroup);
this.lightbar = this.game.add.image(this.lightbox.x + 3,
this.lightbox.y + 3,
'lightbar',
0,
this.uigroup);
this.lightbar_image = game.cache.getImage('lightbar');
this.lightbar_crop = positiveRectangle(0,
0,
this.lightbar_image.width,
this.lightbar_image.height);
this.uigroup.setAll('fixedToCamera', true);
}
GameState.prototype.updateShadowTexture = function() {
this.shadowTexture.context.fillStyle = this.shadowTextureColor;
this.shadowTexture.context.fillRect(0, 0, game.world.width, game.world.height);
this.staticLights.forEach(function(light) {
if ( light.always_render !== true ) {
var r1 = positiveRectangle(this.game.camera.x,
this.game.camera.y,
this.game.camera.width,
this.game.camera.height);
if ( ! light.rect.intersects(r1) ) {
return;
}
}
if ( light.flicker ) {
var radius = light.radius + game.rnd.integerInRange(1,10);
} else {
var radius = light.radius;
}
light.rendered_radius = radius;
var gradient =
this.shadowTexture.context.createRadialGradient(
light.x + 16, light.y + 16, light.fade,
light.x + 16, light.y + 16, radius);
gradient.addColorStop(0, light.color_start);
gradient.addColorStop(1, light.color_stop);
this.shadowTexture.context.beginPath();
this.shadowTexture.context.fillStyle = gradient;
this.shadowTexture.context.arc(light.x + 16, light.y + 16, radius, 0, Math.PI*2);
this.shadowTexture.context.fill();
}, this);
this.shadowTexture.dirty = true;
};
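// Sprite state is a bitmask of STATE_* flags, so facing, moving/running and
// awareness can coexist; the helpers below set, clear and test single bits.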
function getFaceState(spr)
{
if ( hasState(spr, STATE_FACE_LEFT) )
return STATE_FACE_LEFT;
if ( hasState(spr, STATE_FACE_RIGHT) )
return STATE_FACE_RIGHT;
if ( hasState(spr, STATE_FACE_DOWN) )
return STATE_FACE_DOWN;
if ( hasState(spr, STATE_FACE_UP) )
return STATE_FACE_UP;
}
function getMoveState(spr)
{
return ( hasState(spr, STATE_MOVING) ||
hasState(spr, STATE_RUNNING) );
}
function delState(spr, state)
{
if ( hasState(spr, state) )
spr.state = spr.state ^ state;
}
function addState(spr, state)
{
spr.state = spr.state | state;
}
function setMovingState(spr, state)
{
delState(spr, STATE_FACE_LEFT);
delState(spr, STATE_FACE_RIGHT);
delState(spr, STATE_FACE_DOWN);
delState(spr, STATE_FACE_UP);
delState(spr, STATE_MOVING);
delState(spr, STATE_RUNNING);
addState(spr, state);
}
function setAwarenessState(spr, state)
{
delState(spr, STATE_UNAWARE);
delState(spr, STATE_CONCERNED);
delState(spr, STATE_ALERTED);
delState(spr, STATE_LOSTHIM);
addState(spr, state);
}
function exchangeState(spr, state1, state2)
{
delState(spr, state1);
addState(spr, state2);
}
function hasAnyState(spr, states)
{
var hasstate = false;
states.forEach(function(x) {
if ( hasState(spr, x) )
hasstate = true;
}, this);
return hasstate;
}
function hasState(spr, state)
{
    return (spr.state & state) == state;
}
function spriteFacing(spr)
{
if ( hasState(spr, STATE_FACE_LEFT) )
return "left";
if ( hasState(spr, STATE_FACE_RIGHT) )
return "right";
if ( hasState(spr, STATE_FACE_DOWN) )
return "down";
if ( hasState(spr, STATE_FACE_UP) )
return "up";
}
function parseBoolean(val)
{
return ( val == 'true' || val == true );
}
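// Drives animation and movement from the sprite's state bits. velocity is an
// optional [walk, run] speed pair; passing false updates the animation only.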
function setSpriteMovement(spr, velocity)
{
var x = 0;
var y = 0;
var dir = spriteFacing(spr);
    velocity = ( typeof velocity == 'undefined' ? [SPEED_WALKING, SPEED_RUNNING] : velocity );
spr.body.setSize(16, 16, 8, 16);
if ( hasState(spr, STATE_RUNNING) ) {
if ( velocity !== false )
velocity = velocity[1];
console.log("Playing bipedrun" + dir);
spr.animations.play("bipedrun" + dir);
} else if ( hasState(spr, STATE_MOVING) ) {
if ( velocity !== false )
velocity = velocity[0];
console.log("Playing bipedwalk" + dir);
spr.animations.play("bipedwalk" + dir);
} else {
if ( velocity !== false ) {
spr.body.velocity.x = 0;
spr.body.velocity.y = 0;
}
spr.animations.stop();
return;
}
if ( velocity !== false ) {
if ( dir == "left" ) {
spr.body.velocity.x = -(velocity * velocity);
spr.body.velocity.y = 0;
} else if ( dir == "right" ) {
spr.body.velocity.x = (velocity * velocity);
spr.body.velocity.y = 0;
} else if ( dir == "up" ) {
spr.body.velocity.x = 0;
spr.body.velocity.y = -(velocity * velocity);
} else if ( dir == "down" ) {
spr.body.velocity.x = 0;
spr.body.velocity.y = (velocity * velocity);
}
}
}
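// Polls the cursor keys each frame: arrows choose facing + moving, holding
// shift adds running; the resulting bitmask replaces the player's move state.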
GameState.prototype.check_input = function()
{
player.body.velocity.x = 0;
player.body.velocity.y = 0;
velocityMod = 0;
var newstate = 0;
if ( controls.up.isDown) {
if ( controls.up.shiftKey ) {
newstate = (STATE_FACE_UP | STATE_MOVING | STATE_RUNNING);
} else {
newstate = (STATE_FACE_UP | STATE_MOVING );
}
} else if ( controls.down.isDown ) {
if ( controls.down.shiftKey ) {
newstate = (STATE_FACE_DOWN | STATE_MOVING | STATE_RUNNING);
} else {
newstate = (STATE_FACE_DOWN | STATE_MOVING );
}
} else if ( controls.left.isDown ) {
if ( controls.left.shiftKey ) {
newstate = (STATE_FACE_LEFT | STATE_MOVING | STATE_RUNNING);
} else {
newstate = (STATE_FACE_LEFT | STATE_MOVING );
}
} else if ( controls.right.isDown ) {
if ( controls.right.shiftKey ) {
newstate = (STATE_FACE_RIGHT | STATE_MOVING | STATE_RUNNING);
} else {
newstate = (STATE_FACE_RIGHT | STATE_MOVING );
}
} else {
newstate = STATE_NONE;
}
setMovingState(player, newstate);
setSpriteMovement(player);
}
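// Computes how lit the player is (0..1) from the dominant light in range and
// mirrors the value into the cropped UI light bar.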
GameState.prototype.update_player_lightmeter = function() {
lightValue = 0;
this.staticLights.forEach(function(light) {
var left = player.x;
var top = player.y + 32;
        if ( player.y < light.y )
            top = player.y;
        if ( player.x < light.x )
            left = player.x + 32;
        var line = new Phaser.Line(left, top, light.x + 16, light.y + 16);
if ( line.length > light.rendered_radius)
return;
var length = line.length;
var lv = light.light_meter - (Number(length) / Number(light.rendered_radius));
if ( lv > lightValue ) {
lightValue = lv;
}
}, this)
player.lightmeter = lightValue;
this.lightbar_crop.width = (this.lightbar_image.width * lightValue);
this.lightbar.crop(this.lightbar_crop);
}
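// Main per-frame loop: input, light meter, tilemap collisions, positional
// audio, AI sight/awareness, shadow redraw and optional AI debug overlays.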
GameState.prototype.update = function()
{
this.check_input();
this.update_player_lightmeter();
for (var ln in this.map_collision_layers ) {
        var layer = this.map_collision_layers[ln];
this.physics.arcade.collide(player, layer);
}
function _fix_audio_relative(x) {
x.adjust_relative_to(player);
}
this.staticSounds.forEach(_fix_audio_relative, this);
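    // AI awareness: a sprite that can see the player goes ALERTED on contact
    // or when the player is lit above its threshold, otherwise CONCERNED;
    // once sight is lost it passes through LOSTHIM before returning UNAWARE.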
function _inner_collide(x) {
if ( x.collide_with_map == true ) {
for ( var ln in this.map_collision_layers ) {
            var layer = this.map_collision_layers[ln];
this.physics.arcade.collide(x, layer);
}
}
if ( x.collide_with_player == false )
return;
if ( x.canSeeSprite(player, false) == true ) {
if ( this.physics.arcade.collide(x, player) ) {
x.setAwarenessEffect(STATE_ALERTED);
} else if ( player.lightmeter >= x.sprite_can_see_lightmeter ) {
x.setAwarenessEffect(STATE_ALERTED);
} else {
x.setAwarenessEffect(STATE_CONCERNED);
}
return;
} else {
if ( hasState(x, STATE_LOSTHIM) == false ) {
x.setAwarenessEffect(STATE_LOSTHIM);
} else {
x.setAwarenessEffect(STATE_UNAWARE);
}
}
this.physics.arcade.collide(x, player);
}
this.effectSprites.forEach(_inner_collide, this);
this.aiSprites.forEach(_inner_collide, this);
this.updateShadowTexture();
if ( this.aiSprites.debug == true ) {
function _draw_viewrect(x) {
var r = x.viewRectangle();
if ( r == null )
return;
this.shadowTexture.context.fillStyle = 'rgb(128, 128, 128)';
this.shadowTexture.context.fillRect(r.left,
r.top,
r.width,
r.height);
}
this.aiSprites.forEach(_draw_viewrect, this);
function _draw_aipath(x) {
var p = x.path;
if ( p == null )
return;
this.shadowTexture.context.fillStyle = 'rgb(255, 128, 128)';
p.forEach(function(r) {
this.shadowTexture.context.fillRect(r.start.x,
r.start.y,
r.end.x - r.start.x,
r.end.y - r.start.y);
}, this);
}
this.aiSprites.forEach(_draw_aipath, this);
}
if (game.time.fps !== 0) {
this.fpsText.setText(game.time.fps + ' FPS');
}
}
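// State flow: 'boot' loads only the preloader art, 'preloader' queues every
// asset listed in moonlightSettings, then fades into the 'game' state.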
var Boot = function(game) {
};
Boot.prototype.preload = function()
{
game.load.image('preloader', 'gfx/ui/preloader.png');
};
Boot.prototype.create = function()
{
this.input.maxPointers = 1;
this.stage.disableVisibilityChange = false;
    this.stage.scale.pageAlignHorizontally = true;
game.state.start('preloader', true, false);
}
var Preloader = function(game) {
}
Preloader.prototype.preload = function()
{
this.preloadBar = game.add.sprite(0, 0, 'preloader');
this.preloadBar.anchor.setTo(0.5, 0.5);
this.preloadBar.x = game.camera.x + (game.camera.width / 2);
    this.preloadBar.y = game.camera.y + (game.camera.height / 2);
game.load.setPreloadSprite(this.preloadBar, 0);
for (var k in moonlightSettings['map']['tilesets']) {
var ts = moonlightSettings['map']['tilesets'][k];
this.load.image(ts['name'], ts['path']);
}
for (var k in moonlightSettings['images']) {
var i = moonlightSettings['images'][k];
this.load.image(i['name'], i['path']);
}
for (var k in moonlightSettings['sounds']) {
var s = moonlightSettings['sounds'][k];
this.load.audio(s['name'], s['path']);
}
for (var k in moonlightSettings['spritesheets']) {
        var s = moonlightSettings['spritesheets'][k];
        game.load.spritesheet(s['name'], s['path'], s['width'], s['height'], s['frames']);
}
this.load.tilemap('map',
moonlightSettings['map']['path'],
null,
Phaser.Tilemap.TILED_JSON);
}
Preloader.prototype.create = function()
{
function goalready() {
this.preloadBar.destroy();
game.state.start('game', true, false);
}
var tween = this.add.tween(this.preloadBar).to({ alpha: 0 }, 1000, Phaser.Easing.Linear.None, true);
tween.onComplete.add(goalready, this);
}
game.state.add('boot', Boot, false);
game.state.add('preloader', Preloader, false);
game.state.add('game', GameState, false);
game.state.start('boot');
| Refactor chasing code into a generic target chaser
| moonlight/js/moonlight-skulk.js | Refactor chasing code into a generic target chaser | <ide><path>oonlight/js/moonlight-skulk.js
<ide> this.action_chaseplayer = function()
<ide> {
<ide> var movingstate = STATE_NONE;
<del> this.action_chasetarget(player,
<del> STATE_ALERTED,
<del> STATE_MOVING | STATE_RUNNING,
<del> true);
<add> this.chasetarget(player,
<add> STATE_ALERTED,
<add> STATE_MOVING | STATE_RUNNING,
<add> true);
<ide> return;
<ide> }
<ide> |
|
Java | agpl-3.0 | 45e7106602b8c7a9131df8da9c409d116f701c9e | 0 | duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test | 6b882232-2e61-11e5-9284-b827eb9e62be | hello.java | 6b82ae38-2e61-11e5-9284-b827eb9e62be | 6b882232-2e61-11e5-9284-b827eb9e62be | hello.java | 6b882232-2e61-11e5-9284-b827eb9e62be | <ide><path>ello.java
<del>6b82ae38-2e61-11e5-9284-b827eb9e62be
<add>6b882232-2e61-11e5-9284-b827eb9e62be |
|
Java | apache-2.0 | e03245e5c45c1e213ebe68f9c8d6155decb50a2b | 0 | alphafoobar/intellij-community,orekyuu/intellij-community,ibinti/intellij-community,Lekanich/intellij-community,Lekanich/intellij-community,nicolargo/intellij-community,ftomassetti/intellij-community,caot/intellij-community,dslomov/intellij-community,orekyuu/intellij-community,supersven/intellij-community,MichaelNedzelsky/intellij-community,retomerz/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,vladmm/intellij-community,MichaelNedzelsky/intellij-community,hurricup/intellij-community,ol-loginov/intellij-community,ibinti/intellij-community,tmpgit/intellij-community,akosyakov/intellij-community,wreckJ/intellij-community,salguarnieri/intellij-community,dslomov/intellij-community,muntasirsyed/intellij-community,MER-GROUP/intellij-community,pwoodworth/intellij-community,da1z/intellij-community,gnuhub/intellij-community,ol-loginov/intellij-community,ryano144/intellij-community,allotria/intellij-community,dslomov/intellij-community,pwoodworth/intellij-community,ryano144/intellij-community,kdwink/intellij-community,robovm/robovm-studio,vladmm/intellij-community,fitermay/intellij-community,dslomov/intellij-community,salguarnieri/intellij-community,salguarnieri/intellij-community,ftomassetti/intellij-community,signed/intellij-community,samthor/intellij-community,MichaelNedzelsky/intellij-community,asedunov/intellij-community,michaelgallacher/intellij-community,adedayo/intellij-community,akosyakov/intellij-community,xfournet/intellij-community,adedayo/intellij-community,samthor/intellij-community,SerCeMan/intellij-community,Distrotech/intellij-community,fengbaicanhe/intellij-community,caot/intellij-community,hurricup/intellij-community,hurricup/intellij-community,gnuhub/intellij-community,fengbaicanhe/intellij-community,akosyakov/intellij-community,petteyg/intellij-community,supersven/intellij-community,izonder/intellij-community,kdwink/intellij-community,samthor/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,semonte/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,blademainer/intellij-community,diorcety/intellij-community,kool79/intellij-community,gnuhub/intellij-community,petteyg/intellij-community,michaelgallacher/intellij-community,samthor/intellij-community,petteyg/intellij-community,allotria/intellij-community,clumsy/intellij-community,wreckJ/intellij-community,kdwink/intellij-community,adedayo/intellij-community,semonte/intellij-community,tmpgit/intellij-community,fengbaicanhe/intellij-community,fitermay/intellij-community,muntasirsyed/intellij-community,ibinti/intellij-community,amith01994/intellij-community,TangHao1987/intellij-community,lucafavatella/intellij-community,youdonghai/intellij-community,da1z/intellij-community,amith01994/intellij-community,akosyakov/intellij-community,suncycheng/intellij-community,gnuhub/intellij-community,kool79/intellij-community,ryano144/intellij-community,TangHao1987/intellij-community,wreckJ/intellij-community,MER-GROUP/intellij-community,ryano144/intellij-community,xfournet/intellij-community,SerCeMan/intellij-community,ahb0327/intellij-community,orekyuu/intellij-community,tmpgit/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,asedunov/intellij-community,MichaelNedzelsky/intellij-community,kdwink/intellij-community,apixandru/intellij-community,pwoodworth/intellij-community,asedunov/intellij-community,gnuhub/intellij-community,akosyakov/intellij-community,michaelgallache
r/intellij-community,xfournet/intellij-community,muntasirsyed/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,fnouama/intellij-community,da1z/intellij-community,robovm/robovm-studio,Lekanich/intellij-community,xfournet/intellij-community,michaelgallacher/intellij-community,wreckJ/intellij-community,adedayo/intellij-community,ahb0327/intellij-community,tmpgit/intellij-community,amith01994/intellij-community,allotria/intellij-community,slisson/intellij-community,ahb0327/intellij-community,kdwink/intellij-community,TangHao1987/intellij-community,holmes/intellij-community,kool79/intellij-community,izonder/intellij-community,orekyuu/intellij-community,supersven/intellij-community,ftomassetti/intellij-community,izonder/intellij-community,ibinti/intellij-community,muntasirsyed/intellij-community,ahb0327/intellij-community,retomerz/intellij-community,hurricup/intellij-community,SerCeMan/intellij-community,nicolargo/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,ivan-fedorov/intellij-community,izonder/intellij-community,pwoodworth/intellij-community,hurricup/intellij-community,salguarnieri/intellij-community,nicolargo/intellij-community,wreckJ/intellij-community,retomerz/intellij-community,alphafoobar/intellij-community,Distrotech/intellij-community,diorcety/intellij-community,alphafoobar/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,apixandru/intellij-community,pwoodworth/intellij-community,ryano144/intellij-community,slisson/intellij-community,nicolargo/intellij-community,petteyg/intellij-community,hurricup/intellij-community,ahb0327/intellij-community,clumsy/intellij-community,MER-GROUP/intellij-community,alphafoobar/intellij-community,signed/intellij-community,blademainer/intellij-community,allotria/intellij-community,slisson/intellij-community,asedunov/intellij-community,orekyuu/intellij-community,Lekanich/intellij-community,idea4bsd/idea4bsd,kool79/intellij-community,hurricup/intellij-community,Lekanich/intellij-community,wreckJ/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,supersven/intellij-community,michaelgallacher/intellij-community,alphafoobar/intellij-community,allotria/intellij-community,alphafoobar/intellij-community,salguarnieri/intellij-community,retomerz/intellij-community,ivan-fedorov/intellij-community,fnouama/intellij-community,ibinti/intellij-community,vladmm/intellij-community,vladmm/intellij-community,robovm/robovm-studio,orekyuu/intellij-community,idea4bsd/idea4bsd,ol-loginov/intellij-community,lucafavatella/intellij-community,retomerz/intellij-community,xfournet/intellij-community,dslomov/intellij-community,suncycheng/intellij-community,Lekanich/intellij-community,MichaelNedzelsky/intellij-community,muntasirsyed/intellij-community,Lekanich/intellij-community,petteyg/intellij-community,SerCeMan/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,fitermay/intellij-community,Distrotech/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,robovm/robovm-studio,izonder/intellij-community,akosyakov/intellij-community,fengbaicanhe/intellij-community,jagguli/intellij-community,orekyuu/intellij-community,FHannes/intellij-community,jagguli/intellij-community,youdonghai/intellij-community,ftomassetti/intellij-community,fengbaicanhe/intellij-community,pwoodworth/intellij-community,semonte/intellij-community,retomerz/intellij-community,Lekanich/intellij-community,salguarnieri/intellij-community,semonte/intellij-community,wreckJ/intellij-co
mmunity,slisson/intellij-community,signed/intellij-community,vvv1559/intellij-community,ryano144/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,MichaelNedzelsky/intellij-community,jagguli/intellij-community,mglukhikh/intellij-community,blademainer/intellij-community,kool79/intellij-community,amith01994/intellij-community,MER-GROUP/intellij-community,signed/intellij-community,samthor/intellij-community,samthor/intellij-community,youdonghai/intellij-community,ryano144/intellij-community,vladmm/intellij-community,SerCeMan/intellij-community,akosyakov/intellij-community,orekyuu/intellij-community,diorcety/intellij-community,jagguli/intellij-community,ol-loginov/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,izonder/intellij-community,clumsy/intellij-community,asedunov/intellij-community,kdwink/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,holmes/intellij-community,muntasirsyed/intellij-community,tmpgit/intellij-community,Distrotech/intellij-community,xfournet/intellij-community,izonder/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,asedunov/intellij-community,supersven/intellij-community,hurricup/intellij-community,ol-loginov/intellij-community,FHannes/intellij-community,diorcety/intellij-community,nicolargo/intellij-community,vladmm/intellij-community,dslomov/intellij-community,fnouama/intellij-community,ahb0327/intellij-community,Lekanich/intellij-community,semonte/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,wreckJ/intellij-community,fnouama/intellij-community,allotria/intellij-community,caot/intellij-community,supersven/intellij-community,orekyuu/intellij-community,fnouama/intellij-community,fnouama/intellij-community,ahb0327/intellij-community,adedayo/intellij-community,ivan-fedorov/intellij-community,idea4bsd/idea4bsd,ivan-fedorov/intellij-community,vladmm/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,fitermay/intellij-community,apixandru/intellij-community,fnouama/intellij-community,salguarnieri/intellij-community,jagguli/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,allotria/intellij-community,Lekanich/intellij-community,dslomov/intellij-community,muntasirsyed/intellij-community,holmes/intellij-community,kool79/intellij-community,dslomov/intellij-community,amith01994/intellij-community,dslomov/intellij-community,MichaelNedzelsky/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,blademainer/intellij-community,fnouama/intellij-community,alphafoobar/intellij-community,salguarnieri/intellij-community,holmes/intellij-community,samthor/intellij-community,allotria/intellij-community,ahb0327/intellij-community,MichaelNedzelsky/intellij-community,wreckJ/intellij-community,amith01994/intellij-community,ol-loginov/intellij-community,slisson/intellij-community,lucafavatella/intellij-community,Lekanich/intellij-community,Distrotech/intellij-community,fitermay/intellij-community,da1z/intellij-community,supersven/intellij-community,TangHao1987/intellij-community,fitermay/intellij-community,robovm/robovm-studio,suncycheng/intellij-community,diorcety/intellij-community,da1z/intellij-community,clumsy/intellij-community,kdwink/intellij-community,vladmm/intellij-community,izonder/intellij-community,vvv1559/intellij-community,jagguli/intellij-community,blademainer/intellij-community,ahb0327/intellij-community,tmpgit/intellij-community,blademainer/intellij-community,Distrotech/intellij
-community,SerCeMan/intellij-community,gnuhub/intellij-community,suncycheng/intellij-community,jagguli/intellij-community,samthor/intellij-community,muntasirsyed/intellij-community,TangHao1987/intellij-community,dslomov/intellij-community,asedunov/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,apixandru/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,TangHao1987/intellij-community,lucafavatella/intellij-community,nicolargo/intellij-community,holmes/intellij-community,xfournet/intellij-community,michaelgallacher/intellij-community,holmes/intellij-community,allotria/intellij-community,caot/intellij-community,MER-GROUP/intellij-community,TangHao1987/intellij-community,retomerz/intellij-community,ThiagoGarciaAlves/intellij-community,jagguli/intellij-community,ol-loginov/intellij-community,apixandru/intellij-community,supersven/intellij-community,caot/intellij-community,fitermay/intellij-community,petteyg/intellij-community,supersven/intellij-community,akosyakov/intellij-community,ivan-fedorov/intellij-community,ibinti/intellij-community,petteyg/intellij-community,fitermay/intellij-community,fitermay/intellij-community,MER-GROUP/intellij-community,tmpgit/intellij-community,michaelgallacher/intellij-community,clumsy/intellij-community,asedunov/intellij-community,pwoodworth/intellij-community,ibinti/intellij-community,SerCeMan/intellij-community,nicolargo/intellij-community,muntasirsyed/intellij-community,dslomov/intellij-community,ivan-fedorov/intellij-community,mglukhikh/intellij-community,caot/intellij-community,ryano144/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,MichaelNedzelsky/intellij-community,nicolargo/intellij-community,SerCeMan/intellij-community,fengbaicanhe/intellij-community,hurricup/intellij-community,Lekanich/intellij-community,FHannes/intellij-community,signed/intellij-community,vladmm/intellij-community,adedayo/intellij-community,signed/intellij-community,allotria/intellij-community,da1z/intellij-community,tmpgit/intellij-community,amith01994/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,semonte/intellij-community,da1z/intellij-community,blademainer/intellij-community,adedayo/intellij-community,michaelgallacher/intellij-community,vvv1559/intellij-community,idea4bsd/idea4bsd,ryano144/intellij-community,diorcety/intellij-community,fengbaicanhe/intellij-community,akosyakov/intellij-community,Distrotech/intellij-community,ThiagoGarciaAlves/intellij-community,ol-loginov/intellij-community,clumsy/intellij-community,amith01994/intellij-community,ivan-fedorov/intellij-community,signed/intellij-community,ftomassetti/intellij-community,blademainer/intellij-community,idea4bsd/idea4bsd,Distrotech/intellij-community,retomerz/intellij-community,muntasirsyed/intellij-community,salguarnieri/intellij-community,signed/intellij-community,vladmm/intellij-community,signed/intellij-community,TangHao1987/intellij-community,idea4bsd/idea4bsd,Distrotech/intellij-community,izonder/intellij-community,MER-GROUP/intellij-community,semonte/intellij-community,amith01994/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,caot/intellij-community,MER-GROUP/intellij-community,michaelgallacher/intellij-community,kdwink/intellij-community,fengbaicanhe/intellij-community,ahb0327/intellij-community,adedayo/intellij-community,holmes/intellij-community,salguarnieri/intellij-community,FHannes/intellij-community,pwoodworth/intellij-community,tmpgit/intellij-community,FHannes/intellij-c
ommunity,ThiagoGarciaAlves/intellij-community,alphafoobar/intellij-community,diorcety/intellij-community,allotria/intellij-community,ibinti/intellij-community,amith01994/intellij-community,ryano144/intellij-community,idea4bsd/idea4bsd,ftomassetti/intellij-community,pwoodworth/intellij-community,blademainer/intellij-community,apixandru/intellij-community,ahb0327/intellij-community,ol-loginov/intellij-community,ftomassetti/intellij-community,samthor/intellij-community,jagguli/intellij-community,retomerz/intellij-community,youdonghai/intellij-community,lucafavatella/intellij-community,apixandru/intellij-community,da1z/intellij-community,diorcety/intellij-community,petteyg/intellij-community,caot/intellij-community,vvv1559/intellij-community,Distrotech/intellij-community,alphafoobar/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,fitermay/intellij-community,michaelgallacher/intellij-community,youdonghai/intellij-community,kdwink/intellij-community,lucafavatella/intellij-community,ivan-fedorov/intellij-community,samthor/intellij-community,gnuhub/intellij-community,caot/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,salguarnieri/intellij-community,wreckJ/intellij-community,michaelgallacher/intellij-community,diorcety/intellij-community,slisson/intellij-community,blademainer/intellij-community,holmes/intellij-community,tmpgit/intellij-community,vvv1559/intellij-community,nicolargo/intellij-community,SerCeMan/intellij-community,clumsy/intellij-community,alphafoobar/intellij-community,SerCeMan/intellij-community,ivan-fedorov/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,da1z/intellij-community,pwoodworth/intellij-community,ThiagoGarciaAlves/intellij-community,kool79/intellij-community,adedayo/intellij-community,salguarnieri/intellij-community,lucafavatella/intellij-community,alphafoobar/intellij-community,caot/intellij-community,mglukhikh/intellij-community,ftomassetti/intellij-community,kdwink/intellij-community,robovm/robovm-studio,MichaelNedzelsky/intellij-community,fengbaicanhe/intellij-community,slisson/intellij-community,allotria/intellij-community,semonte/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,akosyakov/intellij-community,vvv1559/intellij-community,semonte/intellij-community,kdwink/intellij-community,ryano144/intellij-community,muntasirsyed/intellij-community,slisson/intellij-community,gnuhub/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,xfournet/intellij-community,kdwink/intellij-community,ThiagoGarciaAlves/intellij-community,clumsy/intellij-community,signed/intellij-community,nicolargo/intellij-community,pwoodworth/intellij-community,semonte/intellij-community,hurricup/intellij-community,fnouama/intellij-community,izonder/intellij-community,TangHao1987/intellij-community,SerCeMan/intellij-community,MichaelNedzelsky/intellij-community,wreckJ/intellij-community,robovm/robovm-studio,signed/intellij-community,asedunov/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,fengbaicanhe/intellij-community,holmes/intellij-community,diorcety/intellij-community,ryano144/intellij-community,diorcety/intellij-community,fnouama/intellij-community,MER-GROUP/intellij-community,asedunov/intellij-community,jagguli/intellij-community,slisson/intellij-community,supersven/intellij-community,jagguli/intellij-community,mglukhikh/intellij-community,adedayo/intellij-community,adedayo/intellij-community,idea4bsd
/idea4bsd,TangHao1987/intellij-community,nicolargo/intellij-community,ftomassetti/intellij-community,pwoodworth/intellij-community,da1z/intellij-community,supersven/intellij-community,orekyuu/intellij-community,clumsy/intellij-community,kool79/intellij-community,apixandru/intellij-community,kool79/intellij-community,gnuhub/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,wreckJ/intellij-community,izonder/intellij-community,ThiagoGarciaAlves/intellij-community,adedayo/intellij-community,hurricup/intellij-community,robovm/robovm-studio,hurricup/intellij-community,orekyuu/intellij-community,samthor/intellij-community,robovm/robovm-studio,vladmm/intellij-community,ThiagoGarciaAlves/intellij-community,slisson/intellij-community,suncycheng/intellij-community,signed/intellij-community,akosyakov/intellij-community,alphafoobar/intellij-community,lucafavatella/intellij-community,semonte/intellij-community,clumsy/intellij-community,ivan-fedorov/intellij-community,ahb0327/intellij-community,suncycheng/intellij-community,lucafavatella/intellij-community,retomerz/intellij-community,youdonghai/intellij-community,fengbaicanhe/intellij-community,ibinti/intellij-community,apixandru/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,holmes/intellij-community,vladmm/intellij-community,dslomov/intellij-community,FHannes/intellij-community,izonder/intellij-community,ivan-fedorov/intellij-community,retomerz/intellij-community,MichaelNedzelsky/intellij-community,asedunov/intellij-community,ivan-fedorov/intellij-community,ol-loginov/intellij-community,apixandru/intellij-community,MER-GROUP/intellij-community,kool79/intellij-community,holmes/intellij-community,slisson/intellij-community,youdonghai/intellij-community,gnuhub/intellij-community,signed/intellij-community,fengbaicanhe/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,robovm/robovm-studio,retomerz/intellij-community,Distrotech/intellij-community,TangHao1987/intellij-community,MER-GROUP/intellij-community,kool79/intellij-community,idea4bsd/idea4bsd,holmes/intellij-community,kool79/intellij-community,MER-GROUP/intellij-community,xfournet/intellij-community,blademainer/intellij-community,akosyakov/intellij-community,caot/intellij-community,amith01994/intellij-community,jagguli/intellij-community,supersven/intellij-community,ftomassetti/intellij-community,tmpgit/intellij-community,amith01994/intellij-community,petteyg/intellij-community,ol-loginov/intellij-community,ibinti/intellij-community,orekyuu/intellij-community,hurricup/intellij-community,da1z/intellij-community,blademainer/intellij-community,clumsy/intellij-community,ftomassetti/intellij-community,samthor/intellij-community,TangHao1987/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,petteyg/intellij-community,petteyg/intellij-community,semonte/intellij-community,petteyg/intellij-community,nicolargo/intellij-community,caot/intellij-community,muntasirsyed/intellij-community,slisson/intellij-community,fnouama/intellij-community,robovm/robovm-studio,xfournet/intellij-community,robovm/robovm-studio,michaelgallacher/intellij-community,tmpgit/intellij-community,ThiagoGarciaAlves/intellij-community,ol-loginov/intellij-community,SerCeMan/intellij-community,diorcety/intellij-community,retomerz/intellij-community,apixandru/intellij-community,fnouama/intellij-community,clumsy/intellij-community,xfournet/intellij-community | /*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.vfs.local;
import com.intellij.openapi.application.AccessToken;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.*;
import com.intellij.openapi.vfs.impl.local.FileWatcher;
import com.intellij.openapi.vfs.impl.local.LocalFileSystemImpl;
import com.intellij.openapi.vfs.newvfs.BulkFileListener;
import com.intellij.openapi.vfs.newvfs.NewVirtualFile;
import com.intellij.openapi.vfs.newvfs.events.*;
import com.intellij.openapi.vfs.newvfs.impl.VirtualDirectoryImpl;
import com.intellij.testFramework.PlatformLangTestCase;
import com.intellij.util.Alarm;
import com.intellij.util.Function;
import com.intellij.util.TimeoutUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.messages.MessageBusConnection;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.IOException;
import java.util.*;
import static com.intellij.openapi.util.io.IoTestUtil.*;
public class FileWatcherTest extends PlatformLangTestCase {
private static final int INTER_RESPONSE_DELAY = 500; // time to wait for a next event in a sequence
private static final int NATIVE_PROCESS_DELAY = 60000; // time to wait for a native watcher response
private static Logger LOG = Logger.getInstance("#com.intellij.openapi.vfs.impl.local.FileWatcher");
private FileWatcher myWatcher;
private LocalFileSystem myFileSystem;
private MessageBusConnection myConnection;
private volatile boolean myAccept = false;
private Alarm myAlarm;
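  // Invoked by the native watcher on each event; re-arms the alarm so that
  // myWaiter is notified only after INTER_RESPONSE_DELAY ms without events.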
private final Runnable myNotifier = new Runnable() {
@Override
public void run() {
LOG.debug("-- (event, expected=" + myAccept + ")");
if (!myAccept) return;
myAlarm.cancelAllRequests();
myAlarm.addRequest(new Runnable() {
@Override
public void run() {
myAccept = false;
LOG.debug("** waiting finished");
synchronized (myWaiter) {
myWaiter.notifyAll();
}
}
}, INTER_RESPONSE_DELAY);
}
};
private final Object myWaiter = new Object();
private int myTimeout = NATIVE_PROCESS_DELAY;
private final List<VFileEvent> myEvents = new ArrayList<VFileEvent>();
@Override
protected void setUp() throws Exception {
LOG.debug("================== setting up " + getName() + " ==================");
super.setUp();
myFileSystem = LocalFileSystem.getInstance();
assertNotNull(myFileSystem);
myWatcher = ((LocalFileSystemImpl)myFileSystem).getFileWatcher();
assertNotNull(myWatcher);
assertFalse(myWatcher.isOperational());
myWatcher.startup(myNotifier);
assertTrue(myWatcher.isOperational());
myAlarm = new Alarm(Alarm.ThreadToUse.POOLED_THREAD, getProject());
myTimeout = NATIVE_PROCESS_DELAY;
myConnection = ApplicationManager.getApplication().getMessageBus().connect();
myConnection.subscribe(VirtualFileManager.VFS_CHANGES, new BulkFileListener.Adapter() {
@Override
public void after(@NotNull List<? extends VFileEvent> events) {
synchronized (myEvents) {
myEvents.addAll(events);
}
}
});
((LocalFileSystemImpl)myFileSystem).cleanupForNextTest();
LOG = FileWatcher.getLog();
LOG.debug("================== setting up " + getName() + " ==================");
}
@Override
protected void tearDown() throws Exception {
LOG.debug("================== tearing down " + getName() + " ==================");
try {
myConnection.disconnect();
myWatcher.shutdown();
assertFalse(myWatcher.isOperational());
}
finally {
myFileSystem = null;
myWatcher = null;
super.tearDown();
}
LOG.debug("================== tearing down " + getName() + " ==================");
}
public void testFileRoot() throws Exception {
File file = createTestFile("test.txt");
refresh(file);
LocalFileSystem.WatchRequest request = watch(file);
try {
myAccept = true;
FileUtil.writeToFile(file, "new content");
assertEvent(VFileContentChangeEvent.class, file.getAbsolutePath());
myAccept = true;
FileUtil.delete(file);
assertEvent(VFileDeleteEvent.class, file.getAbsolutePath());
myAccept = true;
FileUtil.writeToFile(file, "re-creation");
assertEvent(VFileCreateEvent.class, file.getAbsolutePath());
}
finally {
unwatch(request);
delete(file);
}
}
public void testNonCanonicallyNamedFileRoot() throws Exception {
if (SystemInfo.isFileSystemCaseSensitive) {
System.err.println("Ignored: case-insensitive FS required");
return;
}
File file = createTestFile("test.txt");
refresh(file);
String watchRoot = file.getAbsolutePath().toUpperCase(Locale.US);
LocalFileSystem.WatchRequest request = watch(new File(watchRoot));
try {
myAccept = true;
FileUtil.writeToFile(file, "new content");
assertEvent(VFileContentChangeEvent.class, file.getAbsolutePath());
myAccept = true;
FileUtil.delete(file);
assertEvent(VFileDeleteEvent.class, file.getAbsolutePath());
myAccept = true;
FileUtil.writeToFile(file, "re-creation");
assertEvent(VFileCreateEvent.class, file.getAbsolutePath());
}
finally {
unwatch(request);
delete(file);
}
}
public void testDirectoryRecursive() throws Exception {
File topDir = createTestDir("top");
refresh(topDir);
LocalFileSystem.WatchRequest request = watch(topDir);
try {
myAccept = true;
File subDir = createTestDir(topDir, "sub");
assertEvent(VFileCreateEvent.class, subDir.getAbsolutePath());
refresh(subDir);
myAccept = true;
File file = createTestFile(subDir, "test.txt");
assertEvent(VFileCreateEvent.class, file.getAbsolutePath());
myAccept = true;
FileUtil.writeToFile(file, "new content");
assertEvent(VFileContentChangeEvent.class, file.getAbsolutePath());
myAccept = true;
FileUtil.delete(file);
assertEvent(VFileDeleteEvent.class, file.getAbsolutePath());
myAccept = true;
FileUtil.writeToFile(file, "re-creation");
assertEvent(VFileCreateEvent.class, file.getAbsolutePath());
}
finally {
unwatch(request);
delete(topDir);
}
}
public void testDirectoryFlat() throws Exception {
File topDir = createTestDir("top");
File watchedFile = createTestFile(topDir, "test.txt");
File subDir = createTestDir(topDir, "sub");
File unwatchedFile = createTestFile(subDir, "test.txt");
refresh(topDir);
LocalFileSystem.WatchRequest request = watch(topDir, false);
try {
myAccept = true;
FileUtil.writeToFile(watchedFile, "new content");
assertEvent(VFileContentChangeEvent.class, watchedFile.getAbsolutePath());
myTimeout = 10 * INTER_RESPONSE_DELAY;
myAccept = true;
FileUtil.writeToFile(unwatchedFile, "new content");
assertEvent(VFileEvent.class);
myTimeout = NATIVE_PROCESS_DELAY;
}
finally {
unwatch(request);
delete(topDir);
}
}
public void testDirectoryMixed() throws Exception {
File topDir = createTestDir("top");
File watchedFile1 = createTestFile(topDir, "test.txt");
File sub1Dir = createTestDir(topDir, "sub1");
File unwatchedFile = createTestFile(sub1Dir, "test.txt");
File sub2Dir = createTestDir(topDir, "sub2");
File sub2subDir = createTestDir(sub2Dir, "sub");
File watchedFile2 = createTestFile(sub2subDir, "test.txt");
refresh(topDir);
LocalFileSystem.WatchRequest topRequest = watch(topDir, false);
LocalFileSystem.WatchRequest subRequest = watch(sub2Dir);
try {
myAccept = true;
FileUtil.writeToFile(watchedFile1, "new content");
FileUtil.writeToFile(watchedFile2, "new content");
FileUtil.writeToFile(unwatchedFile, "new content");
assertEvent(VFileContentChangeEvent.class, watchedFile1.getAbsolutePath(), watchedFile2.getAbsolutePath());
}
finally {
unwatch(subRequest, topRequest);
delete(topDir);
}
}
public void testDirectoryNonExisting() throws Exception {
File topDir = createTestDir("top");
File subDir = new File(topDir, "subDir");
File file = new File(subDir, "file.txt");
refresh(topDir);
LocalFileSystem.WatchRequest request = watch(subDir);
try {
myAccept = true;
assertTrue(subDir.toString(), subDir.mkdir());
assertEvent(VFileCreateEvent.class, subDir.getAbsolutePath());
refresh(subDir);
myAccept = true;
FileUtil.writeToFile(file, "new content");
assertEvent(VFileCreateEvent.class, file.getAbsolutePath());
}
finally {
unwatch(request);
delete(topDir);
}
}
public void testIncorrectPath() throws Exception {
File topDir = createTestDir("top");
File file = createTestFile(topDir, "file.zip");
File subDir = new File(file, "sub/zip");
refresh(topDir);
LocalFileSystem.WatchRequest request = watch(subDir, false);
try {
myTimeout = 10 * INTER_RESPONSE_DELAY;
myAccept = true;
FileUtil.writeToFile(file, "new content");
assertEvent(VFileEvent.class);
myTimeout = NATIVE_PROCESS_DELAY;
}
finally {
unwatch(request);
delete(topDir);
}
}
public void testDirectoryOverlapping() throws Exception {
File topDir = createTestDir("top");
File fileInTopDir = createTestFile(topDir, "file1.txt");
File subDir = createTestDir(topDir, "sub");
File fileInSubDir = createTestFile(subDir, "file2.txt");
File sideDir = createTestDir("side");
File fileInSideDir = createTestFile(sideDir, "file3.txt");
refresh(topDir);
refresh(sideDir);
LocalFileSystem.WatchRequest requestForSubDir = watch(subDir);
LocalFileSystem.WatchRequest requestForSideDir = watch(sideDir);
try {
myAccept = true;
FileUtil.writeToFile(fileInTopDir, "new content");
FileUtil.writeToFile(fileInSubDir, "new content");
FileUtil.writeToFile(fileInSideDir, "new content");
assertEvent(VFileContentChangeEvent.class, fileInSubDir.getAbsolutePath(), fileInSideDir.getAbsolutePath());
LocalFileSystem.WatchRequest requestForTopDir = watch(topDir);
try {
myAccept = true;
FileUtil.writeToFile(fileInTopDir, "newer content");
FileUtil.writeToFile(fileInSubDir, "newer content");
FileUtil.writeToFile(fileInSideDir, "newer content");
assertEvent(VFileContentChangeEvent.class, fileInTopDir.getAbsolutePath(), fileInSubDir.getAbsolutePath(), fileInSideDir.getAbsolutePath());
}
finally {
unwatch(requestForTopDir);
}
myAccept = true;
FileUtil.writeToFile(fileInTopDir, "newest content");
FileUtil.writeToFile(fileInSubDir, "newest content");
FileUtil.writeToFile(fileInSideDir, "newest content");
assertEvent(VFileContentChangeEvent.class, fileInSubDir.getAbsolutePath(), fileInSideDir.getAbsolutePath());
myAccept = true;
FileUtil.delete(fileInTopDir);
FileUtil.delete(fileInSubDir);
FileUtil.delete(fileInSideDir);
assertEvent(VFileDeleteEvent.class, fileInTopDir.getAbsolutePath(), fileInSubDir.getAbsolutePath(), fileInSideDir.getAbsolutePath());
}
finally {
unwatch(requestForSubDir, requestForSideDir);
delete(topDir);
}
}
/*
public void testSymlinkAboveWatchRoot() throws Exception {
final File topDir = FileUtil.createTempDirectory("top.", null);
final File topLink = IoTestUtil.createTempLink(topDir.getAbsolutePath(), "link");
final File subDir = FileUtil.createTempDirectory(topDir, "sub.", null);
final File file = FileUtil.createTempFile(subDir, "test.", ".txt");
final File fileLink = new File(new File(topLink, subDir.getName()), file.getName());
refresh(topDir);
refresh(topLink);
final LocalFileSystem.WatchRequest request = watch(topLink);
try {
myAccept = true;
FileUtil.writeToFile(file, "new content");
assertEvent(VFileContentChangeEvent.class, fileLink.getAbsolutePath());
myAccept = true;
FileUtil.delete(file);
assertEvent(VFileDeleteEvent.class, fileLink.getAbsolutePath());
myAccept = true;
FileUtil.writeToFile(file, "re-creation");
assertEvent(VFileCreateEvent.class, fileLink.getAbsolutePath());
}
finally {
myFileSystem.removeWatchedRoot(request);
delete(topLink);
delete(topDir);
}
}
public void testSymlinkBelowWatchRoot() throws Exception {
final File targetDir = FileUtil.createTempDirectory("top.", null);
final File file = FileUtil.createTempFile(targetDir, "test.", ".txt");
final File linkDir = FileUtil.createTempDirectory("link.", null);
final File link = new File(linkDir, "link");
IoTestUtil.createTempLink(targetDir.getAbsolutePath(), link.getAbsolutePath());
final File fileLink = new File(link, file.getName());
refresh(targetDir);
refresh(linkDir);
final LocalFileSystem.WatchRequest request = watch(linkDir);
try {
myAccept = true;
FileUtil.writeToFile(file, "new content");
assertEvent(VFileContentChangeEvent.class, fileLink.getAbsolutePath());
myAccept = true;
FileUtil.delete(file);
assertEvent(VFileDeleteEvent.class, fileLink.getAbsolutePath());
myAccept = true;
FileUtil.writeToFile(file, "re-creation");
assertEvent(VFileCreateEvent.class, fileLink.getAbsolutePath());
}
finally {
myFileSystem.removeWatchedRoot(request);
delete(linkDir);
delete(targetDir);
}
}
*/
public void testSubst() throws Exception {
if (!SystemInfo.isWindows) {
System.err.println("Ignored: Windows required");
return;
}
File targetDir = createTestDir("top");
File subDir = createTestDir(targetDir, "sub");
File file = createTestFile(subDir, "test.txt");
File rootFile = createSubst(targetDir.getAbsolutePath());
VirtualDirectoryImpl.allowRootAccess(rootFile.getPath());
VirtualFile vfsRoot = myFileSystem.findFileByIoFile(rootFile);
try {
assertNotNull(rootFile.getPath(), vfsRoot);
File substDir = new File(rootFile, subDir.getName());
File substFile = new File(substDir, file.getName());
refresh(targetDir);
refresh(substDir);
LocalFileSystem.WatchRequest request = watch(substDir);
try {
myAccept = true;
FileUtil.writeToFile(file, "new content");
assertEvent(VFileContentChangeEvent.class, substFile.getAbsolutePath());
LocalFileSystem.WatchRequest request2 = watch(targetDir);
try {
myAccept = true;
FileUtil.delete(file);
assertEvent(VFileDeleteEvent.class, file.getAbsolutePath(), substFile.getAbsolutePath());
}
finally {
unwatch(request2);
}
myAccept = true;
FileUtil.writeToFile(file, "re-creation");
assertEvent(VFileCreateEvent.class, substFile.getAbsolutePath());
}
finally {
unwatch(request);
}
}
finally {
delete(targetDir);
deleteSubst(rootFile.getPath());
if (vfsRoot != null) {
((NewVirtualFile)vfsRoot).markDirty();
myFileSystem.refresh(false);
}
VirtualDirectoryImpl.disallowRootAccess(rootFile.getPath());
}
}
public void testDirectoryRecreation() throws Exception {
File rootDir = createTestDir("root");
File topDir = createTestDir(rootDir, "top");
File file1 = createTestFile(topDir, "file1.txt", "abc");
File file2 = createTestFile(topDir, "file2.txt", "123");
refresh(topDir);
LocalFileSystem.WatchRequest request = watch(rootDir);
try {
myAccept = true;
assertTrue(FileUtil.delete(topDir));
assertTrue(topDir.mkdir());
TimeoutUtil.sleep(100);
assertTrue(file1.createNewFile());
assertTrue(file2.createNewFile());
assertEvent(VFileContentChangeEvent.class, file1.getPath(), file2.getPath());
}
finally {
unwatch(request);
delete(topDir);
}
}
public void testWatchRootRecreation() throws Exception {
File rootDir = createTestDir("root");
File file1 = createTestFile(rootDir, "file1.txt", "abc");
File file2 = createTestFile(rootDir, "file2.txt", "123");
refresh(rootDir);
LocalFileSystem.WatchRequest request = watch(rootDir);
try {
myAccept = true;
assertTrue(FileUtil.delete(rootDir));
assertTrue(rootDir.mkdir());
if (SystemInfo.isLinux) TimeoutUtil.sleep(1500); // implementation specific
assertTrue(file1.createNewFile());
assertTrue(file2.createNewFile());
assertEvent(VFileContentChangeEvent.class, file1.getPath(), file2.getPath());
}
finally {
unwatch(request);
delete(rootDir);
}
}
public void testWatchRootRenameRemove() throws Exception {
File topDir = createTestDir("top");
File rootDir = createTestDir(topDir, "root");
File rootDir2 = new File(topDir, "_" + rootDir.getName());
refresh(topDir);
LocalFileSystem.WatchRequest request = watch(rootDir);
try {
myAccept = true;
assertTrue(rootDir.renameTo(rootDir2));
assertEvent(VFileEvent.class, rootDir.getPath(), rootDir2.getPath());
myAccept = true;
assertTrue(rootDir2.renameTo(rootDir));
assertEvent(VFileEvent.class, rootDir.getPath(), rootDir2.getPath());
myAccept = true;
assertTrue(FileUtil.delete(rootDir));
assertEvent(VFileDeleteEvent.class, rootDir.getPath());
myAccept = true;
assertTrue(rootDir.mkdirs());
assertEvent(VFileCreateEvent.class, rootDir.getPath());
myAccept = true;
assertTrue(FileUtil.delete(topDir));
assertEvent(VFileDeleteEvent.class, topDir.getPath());
// todo[r.sh] current VFS implementation loses watch root once it's removed; this probably should be fixed
myAccept = true;
assertTrue(rootDir.mkdirs());
assertEvent(VFileCreateEvent.class);
}
finally {
unwatch(request);
delete(topDir);
}
}
public void testSwitchingToFsRoot() throws Exception {
File topDir = createTestDir("top");
File rootDir = createTestDir(topDir, "root");
File file1 = createTestFile(topDir, "1.txt");
File file2 = createTestFile(rootDir, "2.txt");
refresh(topDir);
File fsRoot = new File(SystemInfo.isUnix ? "/" : topDir.getPath().substring(0, topDir.getPath().indexOf(File.separatorChar)) + "\\");
assertTrue("can't guess root of " + topDir, fsRoot.exists());
LocalFileSystem.WatchRequest request = watch(rootDir);
try {
myAccept = true;
FileUtil.writeToFile(file1, "abc");
FileUtil.writeToFile(file2, "abc");
assertEvent(VFileContentChangeEvent.class, file2.getPath());
LocalFileSystem.WatchRequest rootRequest = watch(fsRoot);
try {
myTimeout = 10 * INTER_RESPONSE_DELAY;
myAccept = true;
FileUtil.writeToFile(file1, "12345");
FileUtil.writeToFile(file2, "12345");
assertEvent(VFileContentChangeEvent.class, file1.getPath(), file2.getPath());
myTimeout = NATIVE_PROCESS_DELAY;
}
finally {
unwatch(rootRequest);
}
myAccept = true;
FileUtil.writeToFile(file1, "");
FileUtil.writeToFile(file2, "");
assertEvent(VFileContentChangeEvent.class, file2.getPath());
}
finally {
unwatch(request);
}
myTimeout = 10 * INTER_RESPONSE_DELAY;
myAccept = true;
FileUtil.writeToFile(file1, "xyz");
FileUtil.writeToFile(file2, "xyz");
assertEvent(VFileEvent.class);
myTimeout = NATIVE_PROCESS_DELAY;
}
public void testLineBreaksInName() throws Exception {
if (!SystemInfo.isUnix) {
System.err.println("Ignored: Unix required");
return;
}
File topDir = createTestDir("topDir");
File testDir = createTestDir(topDir, "weird\ndir\nname");
File testFile = createTestFile(testDir, "weird\nfile\nname");
refresh(topDir);
LocalFileSystem.WatchRequest request = watch(topDir);
try {
myAccept = true;
FileUtil.writeToFile(testFile, "abc");
assertEvent(VFileContentChangeEvent.class, testFile.getPath());
}
finally {
unwatch(request);
}
}
public void testHiddenFiles() throws Exception {
if (!SystemInfo.isWindows) {
System.err.println("Ignored: Windows required");
return;
}
File topDir = createTestDir("topDir");
File testDir = createTestDir(topDir, "dir");
File testFile = createTestFile(testDir, "file", "123");
refresh(topDir);
LocalFileSystem.WatchRequest request = watch(topDir);
try {
myAccept = true;
setHidden(testFile.getPath(), true);
assertEvent(VFilePropertyChangeEvent.class, testFile.getPath());
}
finally {
unwatch(request);
}
}
public void testFileCaseChange() throws Exception {
if (SystemInfo.isFileSystemCaseSensitive) {
System.err.println("Ignored: case-insensitive FS required");
return;
}
File topDir = createTestDir("topDir");
File testFile = createTestFile(topDir, "file.txt", "123");
refresh(topDir);
LocalFileSystem.WatchRequest request = watch(topDir);
try {
myAccept = true;
File newFile = new File(testFile.getParent(), StringUtil.capitalize(testFile.getName()));
FileUtil.rename(testFile, newFile);
assertEvent(VFilePropertyChangeEvent.class, newFile.getPath());
}
finally {
unwatch(request);
}
}
public void testPartialRefresh() throws Exception {
// tests the same scenario with an active file watcher (prevents explicit marking of refreshed paths)
File top = createTestDir("top");
LocalFileSystemTest.doTestPartialRefresh(top);
}
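  // Adds a watch root and waits until the watcher has finished applying the
  // new root set before returning the request.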
@NotNull
private LocalFileSystem.WatchRequest watch(File watchFile) {
return watch(watchFile, true);
}
@NotNull
private LocalFileSystem.WatchRequest watch(final File watchFile, final boolean recursive) {
final Ref<LocalFileSystem.WatchRequest> request = Ref.create();
getEvents("events to add watch " + watchFile, new Runnable() {
@Override
public void run() {
request.set(myFileSystem.addRootToWatch(watchFile.getAbsolutePath(), recursive));
}
});
assertFalse(request.isNull());
assertFalse(myWatcher.isSettingRoots());
return request.get();
}
private void unwatch(final LocalFileSystem.WatchRequest... requests) {
getEvents("events to stop watching", new Runnable() {
@Override
public void run() {
myFileSystem.removeWatchedRoots(Arrays.asList(requests));
}
});
}
private VirtualFile refresh(File file) {
VirtualFile vFile = myFileSystem.refreshAndFindFileByIoFile(file);
assertNotNull(file.toString(), vFile);
VfsUtilCore.visitChildrenRecursively(vFile, new VirtualFileVisitor() {
@Override
public boolean visitFile(@NotNull VirtualFile file) {
file.getChildren();
return true;
}
});
return vFile;
}
private void delete(File file) throws IOException {
VirtualFile vFile = myFileSystem.findFileByIoFile(file);
if (vFile != null) {
AccessToken token = ApplicationManager.getApplication().acquireWriteActionLock(getClass());
try {
vFile.delete(this);
}
finally {
token.finish();
}
}
if (file.exists()) {
FileUtil.delete(file);
}
}
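  // Optionally runs an action, waits (up to myTimeout) for watcher events to
  // settle, refreshes the VFS and returns the drained VFS events.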
private List<VFileEvent> getEvents(String msg, @Nullable Runnable action) {
LOG.debug("** waiting for " + msg);
myAccept = true;
if (action != null) {
action.run();
}
int timeout = myTimeout;
try {
synchronized (myWaiter) {
//noinspection WaitNotInLoop
myWaiter.wait(timeout);
}
}
catch (InterruptedException e) {
LOG.warn(e);
}
LOG.debug("** waited for " + timeout);
myFileSystem.refresh(false);
ArrayList<VFileEvent> result;
synchronized (myEvents) {
result = new ArrayList<VFileEvent>(myEvents);
myEvents.clear();
}
LOG.debug("** events: " + result.size());
return result;
}
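  // Asserts that exactly the given paths produced events of the given type,
  // comparing system-independent paths.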
private void assertEvent(Class<? extends VFileEvent> type, String... paths) {
List<VFileEvent> events = getEvents(type.getSimpleName(), null);
assertEquals(events.toString(), paths.length, events.size());
Set<String> pathSet = ContainerUtil.map2Set(paths, new Function<String, String>() {
@Override
public String fun(final String path) {
return FileUtil.toSystemIndependentName(path);
}
});
for (VFileEvent event : events) {
assertTrue(event.toString(), type.isInstance(event));
VirtualFile eventFile = event.getFile();
assertNotNull(event.toString(), eventFile);
assertTrue(eventFile + " not in " + Arrays.toString(paths), pathSet.remove(eventFile.getPath()));
}
}
}
| platform/platform-tests/testSrc/com/intellij/openapi/vfs/local/FileWatcherTest.java | /*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.vfs.local;
import com.intellij.openapi.application.AccessToken;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.*;
import com.intellij.openapi.vfs.impl.local.FileWatcher;
import com.intellij.openapi.vfs.impl.local.LocalFileSystemImpl;
import com.intellij.openapi.vfs.newvfs.BulkFileListener;
import com.intellij.openapi.vfs.newvfs.NewVirtualFile;
import com.intellij.openapi.vfs.newvfs.events.*;
import com.intellij.openapi.vfs.newvfs.impl.VirtualDirectoryImpl;
import com.intellij.testFramework.PlatformLangTestCase;
import com.intellij.util.Alarm;
import com.intellij.util.Function;
import com.intellij.util.TimeoutUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.messages.MessageBusConnection;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.IOException;
import java.util.*;
import static com.intellij.openapi.util.io.IoTestUtil.*;
public class FileWatcherTest extends PlatformLangTestCase {
private static final int INTER_RESPONSE_DELAY = 500; // time to wait for a next event in a sequence
private static final int NATIVE_PROCESS_DELAY = 60000; // time to wait for a native watcher response
private static Logger LOG = Logger.getInstance("#com.intellij.openapi.vfs.impl.local.FileWatcher");
private FileWatcher myWatcher;
private LocalFileSystem myFileSystem;
private MessageBusConnection myConnection;
private volatile boolean myAccept = false;
private Alarm myAlarm;
private final Runnable myNotifier = new Runnable() {
@Override
public void run() {
LOG.debug("-- (event, expected=" + myAccept + ")");
if (!myAccept) return;
myAlarm.cancelAllRequests();
myAlarm.addRequest(new Runnable() {
@Override
public void run() {
myAccept = false;
LOG.debug("** waiting finished");
synchronized (myWaiter) {
myWaiter.notifyAll();
}
}
}, INTER_RESPONSE_DELAY);
}
};
private final Object myWaiter = new Object();
private int myTimeout = NATIVE_PROCESS_DELAY;
private final List<VFileEvent> myEvents = new ArrayList<VFileEvent>();
@Override
protected void setUp() throws Exception {
LOG.debug("================== setting up " + getName() + " ==================");
super.setUp();
myFileSystem = LocalFileSystem.getInstance();
assertNotNull(myFileSystem);
myWatcher = ((LocalFileSystemImpl)myFileSystem).getFileWatcher();
assertNotNull(myWatcher);
assertFalse(myWatcher.isOperational());
myWatcher.startup(myNotifier);
assertTrue(myWatcher.isOperational());
myAlarm = new Alarm(Alarm.ThreadToUse.POOLED_THREAD, getProject());
myTimeout = NATIVE_PROCESS_DELAY;
myConnection = ApplicationManager.getApplication().getMessageBus().connect();
myConnection.subscribe(VirtualFileManager.VFS_CHANGES, new BulkFileListener.Adapter() {
@Override
public void after(@NotNull List<? extends VFileEvent> events) {
synchronized (myEvents) {
myEvents.addAll(events);
}
}
});
((LocalFileSystemImpl)myFileSystem).cleanupForNextTest();
LOG = FileWatcher.getLog();
LOG.debug("================== setting up " + getName() + " ==================");
}
@Override
protected void tearDown() throws Exception {
LOG.debug("================== tearing down " + getName() + " ==================");
try {
myConnection.disconnect();
myWatcher.shutdown();
assertFalse(myWatcher.isOperational());
}
finally {
myFileSystem = null;
myWatcher = null;
super.tearDown();
}
LOG.debug("================== tearing down " + getName() + " ==================");
}
public void testFileRoot() throws Exception {
File file = createTestFile("test.txt");
refresh(file);
LocalFileSystem.WatchRequest request = watch(file);
try {
myAccept = true;
FileUtil.writeToFile(file, "new content");
assertEvent(VFileContentChangeEvent.class, file.getAbsolutePath());
myAccept = true;
FileUtil.delete(file);
assertEvent(VFileDeleteEvent.class, file.getAbsolutePath());
myAccept = true;
FileUtil.writeToFile(file, "re-creation");
assertEvent(VFileCreateEvent.class, file.getAbsolutePath());
}
finally {
unwatch(request);
delete(file);
}
}
public void testNonCanonicallyNamedFileRoot() throws Exception {
if (SystemInfo.isFileSystemCaseSensitive) {
System.err.println("Ignored: case-insensitive FS required");
return;
}
File file = createTestFile("test.txt");
refresh(file);
String watchRoot = file.getAbsolutePath().toUpperCase(Locale.US);
LocalFileSystem.WatchRequest request = watch(new File(watchRoot));
try {
myAccept = true;
FileUtil.writeToFile(file, "new content");
assertEvent(VFileContentChangeEvent.class, file.getAbsolutePath());
myAccept = true;
FileUtil.delete(file);
assertEvent(VFileDeleteEvent.class, file.getAbsolutePath());
myAccept = true;
FileUtil.writeToFile(file, "re-creation");
assertEvent(VFileCreateEvent.class, file.getAbsolutePath());
}
finally {
unwatch(request);
delete(file);
}
}
public void testDirectoryRecursive() throws Exception {
File topDir = createTestDir("top");
refresh(topDir);
LocalFileSystem.WatchRequest request = watch(topDir);
try {
myAccept = true;
File subDir = createTestDir(topDir, "sub");
assertEvent(VFileCreateEvent.class, subDir.getAbsolutePath());
refresh(subDir);
myAccept = true;
File file = createTestFile(subDir, "test.txt");
assertEvent(VFileCreateEvent.class, file.getAbsolutePath());
myAccept = true;
FileUtil.writeToFile(file, "new content");
assertEvent(VFileContentChangeEvent.class, file.getAbsolutePath());
myAccept = true;
FileUtil.delete(file);
assertEvent(VFileDeleteEvent.class, file.getAbsolutePath());
myAccept = true;
FileUtil.writeToFile(file, "re-creation");
assertEvent(VFileCreateEvent.class, file.getAbsolutePath());
}
finally {
unwatch(request);
delete(topDir);
}
}
public void testDirectoryFlat() throws Exception {
File topDir = createTestDir("top");
File watchedFile = createTestFile(topDir, "test.txt");
File subDir = createTestDir(topDir, "sub");
File unwatchedFile = createTestFile(subDir, "test.txt");
refresh(topDir);
LocalFileSystem.WatchRequest request = watch(topDir, false);
try {
myAccept = true;
FileUtil.writeToFile(watchedFile, "new content");
assertEvent(VFileContentChangeEvent.class, watchedFile.getAbsolutePath());
myTimeout = 10 * INTER_RESPONSE_DELAY;
myAccept = true;
FileUtil.writeToFile(unwatchedFile, "new content");
assertEvent(VFileEvent.class);
myTimeout = NATIVE_PROCESS_DELAY;
}
finally {
unwatch(request);
delete(topDir);
}
}
public void testDirectoryMixed() throws Exception {
File topDir = createTestDir("top");
File watchedFile1 = createTestFile(topDir, "test.txt");
File sub1Dir = createTestDir(topDir, "sub1");
File unwatchedFile = createTestFile(sub1Dir, "test.txt");
File sub2Dir = createTestDir(topDir, "sub2");
File sub2subDir = createTestDir(sub2Dir, "sub");
File watchedFile2 = createTestFile(sub2subDir, "test.txt");
refresh(topDir);
LocalFileSystem.WatchRequest topRequest = watch(topDir, false);
LocalFileSystem.WatchRequest subRequest = watch(sub2Dir);
try {
myAccept = true;
FileUtil.writeToFile(watchedFile1, "new content");
FileUtil.writeToFile(watchedFile2, "new content");
FileUtil.writeToFile(unwatchedFile, "new content");
assertEvent(VFileContentChangeEvent.class, watchedFile1.getAbsolutePath(), watchedFile2.getAbsolutePath());
}
finally {
unwatch(subRequest, topRequest);
delete(topDir);
}
}
public void testDirectoryNonExisting() throws Exception {
File topDir = createTestDir("top");
File subDir = new File(topDir, "subDir");
File file = new File(subDir, "file.txt");
refresh(topDir);
LocalFileSystem.WatchRequest request = watch(subDir);
try {
myAccept = true;
assertTrue(subDir.toString(), subDir.mkdir());
assertEvent(VFileCreateEvent.class, subDir.getAbsolutePath());
refresh(subDir);
myAccept = true;
FileUtil.writeToFile(file, "new content");
assertEvent(VFileCreateEvent.class, file.getAbsolutePath());
}
finally {
unwatch(request);
delete(topDir);
}
}
public void testIncorrectPath() throws Exception {
File topDir = createTestDir("top");
File file = createTestFile(topDir, "file.zip");
File subDir = new File(file, "sub/zip");
refresh(topDir);
LocalFileSystem.WatchRequest request = watch(subDir, false);
try {
myTimeout = 10 * INTER_RESPONSE_DELAY;
myAccept = true;
FileUtil.writeToFile(file, "new content");
assertEvent(VFileEvent.class);
myTimeout = NATIVE_PROCESS_DELAY;
}
finally {
unwatch(request);
delete(topDir);
}
}
public void testDirectoryOverlapping() throws Exception {
File topDir = createTestDir("top");
File fileInTopDir = createTestFile(topDir, "file1.txt");
File subDir = createTestDir(topDir, "sub");
File fileInSubDir = createTestFile(subDir, "file2.txt");
File sideDir = createTestDir("side");
File fileInSideDir = createTestFile(sideDir, "file3.txt");
refresh(topDir);
refresh(sideDir);
LocalFileSystem.WatchRequest requestForSubDir = watch(subDir);
LocalFileSystem.WatchRequest requestForSideDir = watch(sideDir);
try {
myAccept = true;
FileUtil.writeToFile(fileInTopDir, "new content");
FileUtil.writeToFile(fileInSubDir, "new content");
FileUtil.writeToFile(fileInSideDir, "new content");
assertEvent(VFileContentChangeEvent.class, fileInSubDir.getAbsolutePath(), fileInSideDir.getAbsolutePath());
LocalFileSystem.WatchRequest requestForTopDir = watch(topDir);
try {
myAccept = true;
FileUtil.writeToFile(fileInTopDir, "newer content");
FileUtil.writeToFile(fileInSubDir, "newer content");
FileUtil.writeToFile(fileInSideDir, "newer content");
assertEvent(VFileContentChangeEvent.class, fileInTopDir.getAbsolutePath(), fileInSubDir.getAbsolutePath(), fileInSideDir.getAbsolutePath());
}
finally {
unwatch(requestForTopDir);
}
myAccept = true;
FileUtil.writeToFile(fileInTopDir, "newest content");
FileUtil.writeToFile(fileInSubDir, "newest content");
FileUtil.writeToFile(fileInSideDir, "newest content");
assertEvent(VFileContentChangeEvent.class, fileInSubDir.getAbsolutePath(), fileInSideDir.getAbsolutePath());
myAccept = true;
FileUtil.delete(fileInTopDir);
FileUtil.delete(fileInSubDir);
FileUtil.delete(fileInSideDir);
assertEvent(VFileDeleteEvent.class, fileInTopDir.getAbsolutePath(), fileInSubDir.getAbsolutePath(), fileInSideDir.getAbsolutePath());
}
finally {
unwatch(requestForSubDir, requestForSideDir);
delete(topDir);
}
}
/*
public void testSymlinkAboveWatchRoot() throws Exception {
final File topDir = FileUtil.createTempDirectory("top.", null);
final File topLink = IoTestUtil.createTempLink(topDir.getAbsolutePath(), "link");
final File subDir = FileUtil.createTempDirectory(topDir, "sub.", null);
final File file = FileUtil.createTempFile(subDir, "test.", ".txt");
final File fileLink = new File(new File(topLink, subDir.getName()), file.getName());
refresh(topDir);
refresh(topLink);
final LocalFileSystem.WatchRequest request = watch(topLink);
try {
myAccept = true;
FileUtil.writeToFile(file, "new content");
assertEvent(VFileContentChangeEvent.class, fileLink.getAbsolutePath());
myAccept = true;
FileUtil.delete(file);
assertEvent(VFileDeleteEvent.class, fileLink.getAbsolutePath());
myAccept = true;
FileUtil.writeToFile(file, "re-creation");
assertEvent(VFileCreateEvent.class, fileLink.getAbsolutePath());
}
finally {
myFileSystem.removeWatchedRoot(request);
delete(topLink);
delete(topDir);
}
}
public void testSymlinkBelowWatchRoot() throws Exception {
final File targetDir = FileUtil.createTempDirectory("top.", null);
final File file = FileUtil.createTempFile(targetDir, "test.", ".txt");
final File linkDir = FileUtil.createTempDirectory("link.", null);
final File link = new File(linkDir, "link");
IoTestUtil.createTempLink(targetDir.getAbsolutePath(), link.getAbsolutePath());
final File fileLink = new File(link, file.getName());
refresh(targetDir);
refresh(linkDir);
final LocalFileSystem.WatchRequest request = watch(linkDir);
try {
myAccept = true;
FileUtil.writeToFile(file, "new content");
assertEvent(VFileContentChangeEvent.class, fileLink.getAbsolutePath());
myAccept = true;
FileUtil.delete(file);
assertEvent(VFileDeleteEvent.class, fileLink.getAbsolutePath());
myAccept = true;
FileUtil.writeToFile(file, "re-creation");
assertEvent(VFileCreateEvent.class, fileLink.getAbsolutePath());
}
finally {
myFileSystem.removeWatchedRoot(request);
delete(linkDir);
delete(targetDir);
}
}
*/
public void testSubst() throws Exception {
if (!SystemInfo.isWindows) {
System.err.println("Ignored: Windows required");
return;
}
File targetDir = createTestDir("top");
File subDir = createTestDir(targetDir, "sub");
File file = createTestFile(subDir, "test.txt");
File rootFile = createSubst(targetDir.getAbsolutePath());
VirtualDirectoryImpl.allowRootAccess(rootFile.getPath());
VirtualFile vfsRoot = myFileSystem.findFileByIoFile(rootFile);
try {
assertNotNull(rootFile.getPath(), vfsRoot);
File substDir = new File(rootFile, subDir.getName());
File substFile = new File(substDir, file.getName());
refresh(targetDir);
refresh(substDir);
LocalFileSystem.WatchRequest request = watch(substDir);
try {
myAccept = true;
FileUtil.writeToFile(file, "new content");
assertEvent(VFileContentChangeEvent.class, substFile.getAbsolutePath());
LocalFileSystem.WatchRequest request2 = watch(targetDir);
try {
myAccept = true;
FileUtil.delete(file);
assertEvent(VFileDeleteEvent.class, file.getAbsolutePath(), substFile.getAbsolutePath());
}
finally {
unwatch(request2);
}
myAccept = true;
FileUtil.writeToFile(file, "re-creation");
assertEvent(VFileCreateEvent.class, substFile.getAbsolutePath());
}
finally {
unwatch(request);
}
}
finally {
delete(targetDir);
deleteSubst(rootFile.getPath());
if (vfsRoot != null) {
((NewVirtualFile)vfsRoot).markDirty();
myFileSystem.refresh(false);
}
VirtualDirectoryImpl.disallowRootAccess(rootFile.getPath());
}
}
public void testDirectoryRecreation() throws Exception {
File rootDir = createTestDir("root");
File topDir = createTestDir(rootDir, "top");
File file1 = createTestFile(topDir, "file1.txt", "abc");
File file2 = createTestFile(topDir, "file2.txt", "123");
refresh(topDir);
LocalFileSystem.WatchRequest request = watch(rootDir);
try {
myAccept = true;
assertTrue(FileUtil.delete(topDir));
assertTrue(topDir.mkdir());
TimeoutUtil.sleep(100);
assertTrue(file1.createNewFile());
assertTrue(file2.createNewFile());
assertEvent(VFileContentChangeEvent.class, file1.getPath(), file2.getPath());
}
finally {
unwatch(request);
delete(topDir);
}
}
public void testWatchRootRecreation() throws Exception {
File rootDir = createTestDir("root");
File file1 = createTestFile(rootDir, "file1.txt", "abc");
File file2 = createTestFile(rootDir, "file2.txt", "123");
refresh(rootDir);
LocalFileSystem.WatchRequest request = watch(rootDir);
try {
myAccept = true;
assertTrue(FileUtil.delete(rootDir));
assertTrue(rootDir.mkdir());
if (SystemInfo.isLinux) TimeoutUtil.sleep(1500); // implementation specific
assertTrue(file1.createNewFile());
assertTrue(file2.createNewFile());
assertEvent(VFileContentChangeEvent.class, file1.getPath(), file2.getPath());
}
finally {
unwatch(request);
delete(rootDir);
}
}
public void testWatchRootRenameRemove() throws Exception {
File topDir = createTestDir("top");
File rootDir = createTestDir(topDir, "root");
File rootDir2 = new File(topDir, "_" + rootDir.getName());
refresh(topDir);
LocalFileSystem.WatchRequest request = watch(rootDir);
try {
myAccept = true;
assertTrue(rootDir.renameTo(rootDir2));
assertEvent(VFileEvent.class, rootDir.getPath(), rootDir2.getPath());
myAccept = true;
assertTrue(rootDir2.renameTo(rootDir));
assertEvent(VFileEvent.class, rootDir.getPath(), rootDir2.getPath());
myAccept = true;
assertTrue(FileUtil.delete(rootDir));
assertEvent(VFileDeleteEvent.class, rootDir.getPath());
myAccept = true;
assertTrue(rootDir.mkdirs());
assertEvent(VFileCreateEvent.class, rootDir.getPath());
myAccept = true;
assertTrue(FileUtil.delete(topDir));
assertEvent(VFileDeleteEvent.class, topDir.getPath());
// todo[r.sh] current VFS implementation loses watch root once it's removed; this probably should be fixed
myAccept = true;
assertTrue(rootDir.mkdirs());
assertEvent(VFileCreateEvent.class);
}
finally {
unwatch(request);
delete(topDir);
}
}
public void testSwitchingToFsRoot() throws Exception {
File topDir = createTestDir("top");
File rootDir = createTestDir(topDir, "root");
File file1 = createTestFile(topDir, "1.txt");
File file2 = createTestFile(rootDir, "2.txt");
refresh(topDir);
File fsRoot = new File(SystemInfo.isUnix ? "/" : topDir.getPath().substring(0, topDir.getPath().indexOf(File.separatorChar)) + "\\");
assertTrue("can't guess root of " + topDir, fsRoot.exists());
LocalFileSystem.WatchRequest request = watch(rootDir);
try {
myAccept = true;
FileUtil.writeToFile(file1, "abc");
FileUtil.writeToFile(file2, "abc");
assertEvent(VFileContentChangeEvent.class, file2.getPath());
LocalFileSystem.WatchRequest rootRequest = watch(fsRoot);
try {
myTimeout = 10 * INTER_RESPONSE_DELAY;
myAccept = true;
FileUtil.writeToFile(file1, "12345");
FileUtil.writeToFile(file2, "12345");
assertEvent(VFileContentChangeEvent.class, file1.getPath(), file2.getPath());
myTimeout = NATIVE_PROCESS_DELAY;
}
finally {
unwatch(rootRequest);
}
myAccept = true;
FileUtil.writeToFile(file1, "");
FileUtil.writeToFile(file2, "");
assertEvent(VFileContentChangeEvent.class, file2.getPath());
}
finally {
unwatch(request);
}
myTimeout = 10 * INTER_RESPONSE_DELAY;
myAccept = true;
FileUtil.writeToFile(file1, "xyz");
FileUtil.writeToFile(file2, "xyz");
assertEvent(VFileEvent.class);
myTimeout = NATIVE_PROCESS_DELAY;
}
public void testLineBreaksInName() throws Exception {
if (!SystemInfo.isUnix) {
System.err.println("Ignored: Unix required");
return;
}
File topDir = createTestDir("topDir");
File testDir = createTestDir(topDir, "weird\ndir\nname");
File testFile = createTestFile(testDir, "weird\nfile\nname");
refresh(topDir);
LocalFileSystem.WatchRequest request = watch(topDir);
try {
myAccept = true;
FileUtil.writeToFile(testFile, "abc");
assertEvent(VFileContentChangeEvent.class, testFile.getPath());
}
finally {
unwatch(request);
}
}
public void testHiddenFiles() throws Exception {
if (!SystemInfo.isWindows) {
System.err.println("Ignored: Windows required");
return;
}
File topDir = createTestDir("topDir");
File testDir = createTestDir(topDir, "dir");
File testFile = createTestFile(testDir, "file", "123");
refresh(topDir);
LocalFileSystem.WatchRequest request = watch(topDir);
try {
myAccept = true;
setHidden(testFile.getPath(), true);
assertEvent(VFilePropertyChangeEvent.class, testFile.getPath());
}
finally {
unwatch(request);
}
}
public void testFileCaseChange() throws Exception {
if (SystemInfo.isFileSystemCaseSensitive) {
System.err.println("Ignored: case-insensitive FS required");
return;
}
File topDir = createTestDir("topDir");
File testFile = createTestFile(topDir, "file.txt", "123");
refresh(topDir);
LocalFileSystem.WatchRequest request = watch(topDir);
try {
myAccept = true;
File newFile = new File(testFile.getParent(), StringUtil.capitalize(testFile.getName()));
FileUtil.rename(testFile, newFile);
assertEvent(VFilePropertyChangeEvent.class, newFile.getPath());
}
finally {
unwatch(request);
}
}
public void testPartialRefresh() throws Exception {
// tests the same scenario with an active file watcher: this affects
File top = createTestDir("top");
LocalFileSystemTest.doTestPartialRefresh(top);
}
@NotNull
private LocalFileSystem.WatchRequest watch(File watchFile) {
return watch(watchFile, true);
}
@NotNull
private LocalFileSystem.WatchRequest watch(final File watchFile, final boolean recursive) {
final Ref<LocalFileSystem.WatchRequest> request = Ref.create();
getEvents("events to add watch " + watchFile, new Runnable() {
@Override
public void run() {
request.set(myFileSystem.addRootToWatch(watchFile.getAbsolutePath(), recursive));
}
});
assertFalse(request.isNull());
assertFalse(myWatcher.isSettingRoots());
return request.get();
}
private void unwatch(final LocalFileSystem.WatchRequest... requests) {
getEvents("events to stop watching", new Runnable() {
@Override
public void run() {
myFileSystem.removeWatchedRoots(Arrays.asList(requests));
}
});
}
private VirtualFile refresh(File file) {
VirtualFile vFile = myFileSystem.refreshAndFindFileByIoFile(file);
assertNotNull(file.toString(), vFile);
VfsUtilCore.visitChildrenRecursively(vFile, new VirtualFileVisitor() {
@Override
public boolean visitFile(@NotNull VirtualFile file) {
file.getChildren();
return true;
}
});
return vFile;
}
private void delete(File file) throws IOException {
VirtualFile vFile = myFileSystem.findFileByIoFile(file);
if (vFile != null) {
AccessToken token = ApplicationManager.getApplication().acquireWriteActionLock(getClass());
try {
vFile.delete(this);
}
finally {
token.finish();
}
}
if (file.exists()) {
FileUtil.delete(file);
}
}
private List<VFileEvent> getEvents(String msg, @Nullable Runnable action) {
LOG.debug("** waiting for " + msg);
myAccept = true;
if (action != null) {
action.run();
}
int timeout = myTimeout;
try {
synchronized (myWaiter) {
//noinspection WaitNotInLoop
myWaiter.wait(timeout);
}
}
catch (InterruptedException e) {
LOG.warn(e);
}
LOG.debug("** waited for " + timeout);
myFileSystem.refresh(false);
ArrayList<VFileEvent> result;
synchronized (myEvents) {
result = new ArrayList<VFileEvent>(myEvents);
myEvents.clear();
}
LOG.debug("** events: " + result.size());
return result;
}
private void assertEvent(Class<? extends VFileEvent> type, String... paths) {
List<VFileEvent> events = getEvents(type.getSimpleName(), null);
assertEquals(events.toString(), paths.length, events.size());
Set<String> pathSet = ContainerUtil.map2Set(paths, new Function<String, String>() {
@Override
public String fun(final String path) {
return FileUtil.toSystemIndependentName(path);
}
});
for (VFileEvent event : events) {
assertTrue(event.toString(), type.isInstance(event));
VirtualFile eventFile = event.getFile();
assertNotNull(event.toString(), eventFile);
assertTrue(eventFile + " not in " + Arrays.toString(paths), pathSet.remove(eventFile.getPath()));
}
}
}
| Cleanup (comment)
| platform/platform-tests/testSrc/com/intellij/openapi/vfs/local/FileWatcherTest.java | Cleanup (comment) | <ide><path>latform/platform-tests/testSrc/com/intellij/openapi/vfs/local/FileWatcherTest.java
<ide> }
<ide>
<ide> public void testPartialRefresh() throws Exception {
<del> // tests the same scenario with an active file watcher: this affects
<add> // tests the same scenario with an active file watcher (prevents explicit marking of refreshed paths)
<ide> File top = createTestDir("top");
<ide> LocalFileSystemTest.doTestPartialRefresh(top);
<ide> } |
|
Java | lgpl-2.1 | c0ac615c75c7277e0d10020b9ab8c8628504cbeb | 0 | milesibastos/jTDS,milesibastos/jTDS,milesibastos/jTDS,milesibastos/jTDS | //
// Copyright 1998 CDS Networks, Inc., Medford Oregon
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// 3. All advertising materials mentioning features or use of this software
// must display the following acknowledgement:
// This product includes software developed by CDS Networks, Inc.
// 4. The name of CDS Networks, Inc. may not be used to endorse or promote
// products derived from this software without specific prior
// written permission.
//
// THIS SOFTWARE IS PROVIDED BY CDS NETWORKS, INC. ``AS IS'' AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL CDS NETWORKS, INC. BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
// OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
// OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
// SUCH DAMAGE.
//
/**
* A Statement object is used for executing a static SQL statement and
* obtaining the results produced by it.
*
* <p>Only one ResultSet per Statement can be open at any point in time.
* Therefore, if the reading of one ResultSet is interleaved with the
* reading of another, each must have been generated by different
* Statements. All statement execute methods implicitly close a
* statement's current ResultSet if an open one exists.
*
* @see java.sql.Statement
* @see ResultSet
* @version $Id: TdsStatement.java,v 1.19 2002-08-28 08:38:54 alin_sinpalean Exp $
*/
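/*
 * A minimal usage sketch (assumed caller code, not taken from this file): since
 * every execute method implicitly closes the statement's current ResultSet,
 *
 *    java.sql.ResultSet rs1 = stmt.executeQuery("select 1");
 *    java.sql.ResultSet rs2 = stmt.executeQuery("select 2"); // rs1 is now closed
 *
 * any reading of rs1 must be finished before the second query is issued.
 */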
package com.internetcds.jdbc.tds;
import java.sql.*;
public class TdsStatement implements java.sql.Statement
{
public static final String cvsVersion = "$Id: TdsStatement.java,v 1.19 2002-08-28 08:38:54 alin_sinpalean Exp $";
protected TdsConnection connection; // The connection who created us
// ResultSet currentResults = null; // The current results
protected SQLWarningChain warningChain; // The warnings chain.
protected int timeout = 0; // The timeout for a query
protected TdsResultSet results = null;
protected Tds actTds = null;
private boolean escapeProcessing = true;
protected int updateCount = -1;
   private int maxFieldSize = (1<<31)-1; // evaluates to Integer.MAX_VALUE (int overflow wraps)
private int maxRows = 0;
/**
* Set when <code>getMoreResults</code> returns <code>true</code>. In this
* case we should know we have to open the next <code>ResultSet</code> (and
* close it) if the user doesn't do it. In a way, it marks that the current
* result is the next <code>ResultSet</code>.
*/
private boolean unopenedResult = false;
private int type = ResultSet.TYPE_FORWARD_ONLY;
private int concurrency = ResultSet.CONCUR_READ_ONLY;
public TdsStatement( TdsConnection con, int type, int concurrency )
throws SQLException
{
this.connection = con;
this.warningChain = new SQLWarningChain();
this.type = type;
this.concurrency = concurrency;
}
/**
* Constructor for a Statement. It simply sets the connection
* that created us.
*
    * @param con the Connection instance that created us
*/
public TdsStatement( TdsConnection con )
throws SQLException
{
this( con, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY );
}
protected void eofResults() throws SQLException
{
releaseTds();
}
protected void releaseTds() throws SQLException
{
// If the connection is not in autocommit mode, don't free the Tds.
// If we do that, we won't be able to commit/rollback the statement.
if( actTds==null || !connection.getAutoCommit() )
return;
// Don't free the Tds if there are any results left.
/** @todo Check if this is correct in case an IOException occurs */
if( actTds.moreResults() )
return;
try
{
connection.freeTds(actTds);
unopenedResult = false;
actTds = null;
}
catch (TdsException e)
{
throw new SQLException("confusion in freeing Tds " + e);
}
}
private void NotImplemented() throws java.sql.SQLException
{
throw new SQLException("Not Implemented");
}
protected void finalize()
throws Throwable
{
super.finalize();
if (actTds != null)
{
close();
}
}
/**
* Execute an SQL statement that returns a single <code>ResultSet</code>.
*
* @param sql typically a static SQL SELECT statement
* @return a <code>ResultSet</code> that contains the data produced by
* the query; never <code>null</code>
* @exception SQLException if a database access error occurs
*/
public ResultSet executeQuery(String sql) throws SQLException
{
if( type==ResultSet.TYPE_FORWARD_ONLY &&
concurrency==ResultSet.CONCUR_READ_ONLY )
{
ResultSet rs = internalExecuteQuery( sql );
if( rs != null )
return rs;
else
throw new SQLException("No ResultSet was produced.");
}
else
return new freetds.CursorResultSet(this, sql);
}
final public TdsResultSet internalExecuteQuery(String sql) throws SQLException
{
return internalExecuteQuery( getTds(sql), sql );
}
/**
* This is the internal function that all subclasses should call.
* It is not executeQuery() to allow subclasses (in particular
* CursorResultSet) to override that functionality without
* breaking the internal methods.
*/
final public TdsResultSet internalExecuteQuery(Tds tds, String sql)
throws SQLException
{
if (execute(tds, sql))
{
startResultSet(tds);
}
return results;
}
/**
* Execute a SQL INSERT, UPDATE or DELETE statement. In addition
* SQL statements that return nothing such as SQL DDL statements
* can be executed
*
* Any IDs generated for AUTO_INCREMENT fields can be retrieved
* by looking through the SQLWarning chain of this statement
* for warnings of the form "LAST_INSERTED_ID = 'some number',
* COMMAND = 'your sql'".
*
    * @param sql a SQL statement
* @return either a row count, or 0 for SQL commands
* @exception SQLException if a database access error occurs
*/
public int executeUpdate(String sql) throws SQLException
{
if (execute(sql))
{
startResultSet(actTds);
closeResults();
throw new SQLException("executeUpdate can't return a result set");
}
else
{
int res = getUpdateCount();
// We should return 0 (at least that's what the javadoc above says)
return res==-1 ? 0 : res;
}
}
protected void closeResults()
throws java.sql.SQLException
{
// If we have a ResultSet that was not opened by the user although a call
// to getMoreResults was made, we should skip it, so open it.
if( unopenedResult )
startResultSet(actTds);
if (results != null)
{
results.close();
results = null;
}
}
/**
* Eats all available input from the server. Not very efficient (since it
* reads in all data by creating <code>ResultSets</code> and processing
* them), but at least it works (the old version would crash when reading in
* a row because it didn't have any information about the row's Context).
* <p>
* This could be changed to use the <code>TdsComm</code> to read in all the
* server response without processing it, but that requires some changes in
* <code>TdsComm</code>, too.
*/
private void skipToEnd(Tds tds)
throws java.sql.SQLException
{
while( getMoreResultsImpl(tds, false) || updateCount!=-1 );
}
public void commit()
throws java.sql.SQLException, java.io.IOException, com.internetcds.jdbc.tds.TdsUnknownPacketSubType, com.internetcds.jdbc.tds.TdsException
{
String sql = "IF @@TRANCOUNT > 0 COMMIT TRAN ";
if( actTds == null )
{
throw new SQLException("Statement is closed");
}
internalExecuteQuery(actTds,sql);
skipToEnd(actTds);
actTds.commit();
connection.freeTds(actTds);
actTds = null;
}
public void rollback()
throws java.sql.SQLException, java.io.IOException, com.internetcds.jdbc.tds.TdsUnknownPacketSubType, com.internetcds.jdbc.tds.TdsException
{
String sql = "IF @@TRANCOUNT > 0 ROLLBACK TRAN ";
if( actTds == null )
{
throw new SQLException("Statement is closed");
}
internalExecuteQuery(actTds,sql);
skipToEnd(actTds);
if( !actTds.autoCommit )
actTds.rollback();
else
actTds.commit();
connection.freeTds(actTds);
actTds = null;
}
/**
* In many cases, it is desirable to immediately release a
* Statement's database and JDBC resources instead of waiting
* for this to happen when it is automatically closed. The
* close method provides this immediate release.
*
* <p><B>Note:</B> A Statement is automatically closed when it is
* garbage collected. When a Statement is closed, its current
* ResultSet, if one exists, is also closed.
*
* @exception SQLException if a database access error occurs (why?)
*/
public void close() throws SQLException
{
// Already closed.
if( actTds == null )
return;
closeResults();
// now we need to relinquish the connection
if( actTds != null )
{
// Rollback any pending transactions
try
{
rollback();
}
catch (com.internetcds.jdbc.tds.TdsUnknownPacketSubType e)
{
throw new SQLException("Unknown packet. \n" + e.getMessage());
}
catch (com.internetcds.jdbc.tds.TdsException e)
{
// XXX
// ignore this for now
}
catch (java.io.IOException e)
{
// XXX
// ignore this for now
}
}
try
{
((ConnectionHelper)connection).markAsClosed(this);
}
catch(TdsException e)
{
// System.out.println("XXX: " + e.getMessage());
// throw new SQLException(e.getMessage());
// already closed by connection close ??
}
}
/**
* The maxFieldSize limit (in bytes) is the maximum amount of
* data returned for any column value; it only applies to
* BINARY, VARBINARY, LONGVARBINARY, CHAR, VARCHAR and LONGVARCHAR
* columns. If the limit is exceeded, the excess data is silently
* discarded.
*
* @return the current max column size limit; zero means unlimited
* @exception SQLException if a database access error occurs
*/
public int getMaxFieldSize() throws SQLException
{
return maxFieldSize;
}
/**
* Sets the maxFieldSize
*
* @param max the new max column size limit; zero means unlimited
* @exception SQLException if size exceeds buffer size
*/
public void setMaxFieldSize(int max) throws SQLException
{
maxFieldSize = max;
}
/**
* The maxRows limit is set to limit the number of rows that
* any ResultSet can contain. If the limit is exceeded, the
* excess rows are silently dropped.
*
* @return the current maximum row limit; zero means unlimited
* @exception SQLException if a database access error occurs
*/
public int getMaxRows() throws SQLException
{
return maxRows;
}
/**
* Set the maximum number of rows
*
* @param max the new max rows limit; zero means unlimited
* @exception SQLException if a database access error occurs
* @see #getMaxRows
*/
public void setMaxRows(int max) throws SQLException
{
      if (max < 0)
{
throw new SQLException("Negative row count");
}
maxRows = max;
// this.executeUpdate("set rowcount " + maxRows);
}
/**
* If escape scanning is on (the default), the driver will do escape
* substitution before sending the SQL to the database.
*
* @param enable true to enable; false to disable
* @exception SQLException if a database access error occurs
*/
public void setEscapeProcessing(boolean enable) throws SQLException
{
escapeProcessing = enable;
}
/**
* The queryTimeout limit is the number of seconds the driver
* will wait for a Statement to execute. If the limit is
* exceeded, a SQLException is thrown.
*
* @return the current query timeout limit in seconds; 0 = unlimited
* @exception SQLException if a database access error occurs
*/
public int getQueryTimeout() throws SQLException
{
return timeout;
}
/**
* Sets the queryTimeout limit
*
* @param seconds - the new query timeout limit in seconds
* @exception SQLException if a database access error occurs
*/
public void setQueryTimeout(int seconds) throws SQLException
{
timeout = seconds;
}
/**
*
* @exception SQLException
*/
public void cancel() throws SQLException
{
if (actTds == null)
{
throw new SQLException("Statement is closed");
}
try
{
actTds.cancel();
}
catch(com.internetcds.jdbc.tds.TdsException e)
{
throw new SQLException(e.getMessage());
}
catch(java.io.IOException e)
{
throw new SQLException(e.getMessage());
}
}
/**
* The first warning reported by calls on this Statement is
* returned. A Statement's execute methods clear its SQLWarning
* chain. Subsequent Statement warnings will be chained to this
* SQLWarning.
*
* <p>The Warning chain is automatically cleared each time a statement
* is (re)executed.
*
* <p><B>Note:</B> If you are processing a ResultSet then any warnings
* associated with ResultSet reads will be chained on the ResultSet
* object.
*
* @return the first SQLWarning on null
* @exception SQLException if a database access error occurs
*/
public SQLWarning getWarnings() throws SQLException
{
return warningChain.getWarnings();
}
/**
* After this call, getWarnings returns null until a new warning
* is reported for this Statement.
*
* @exception SQLException if a database access error occurs (why?)
*/
public void clearWarnings() throws SQLException
{
warningChain.clearWarnings();
}
/**
* setCursorName defines the SQL cursor name that will be used by
* subsequent execute methods. This name can then be used in SQL
* positioned update/delete statements to identify the current row
* in the ResultSet generated by this statement. If a database
* doesn't support positioned update/delete, this method is a
* no-op.
*
*
* @param name the new cursor name
* @exception SQLException if a database access error occurs
*/
public void setCursorName(String name) throws SQLException
{
NotImplemented();
}
public boolean execute(String sql) throws SQLException
{
return execute(getTds(sql),sql);
}
protected Tds getTds(String sql) throws SQLException
{
if( actTds != null )
return actTds;
else
{
actTds=connection.allocateTds();
actTds.setStatement(this);
return actTds;
}
}
/**
* @param sql any SQL statement
    * @return true if the next result is a ResultSet, false if it is
* an update count or there are no more results
* @exception SQLException if a database access error occurs
*/
public boolean execute(Tds tds, String sql) throws SQLException
{
// Clear warnings before closing results, otherwise the last exception will be thrown.
clearWarnings();
closeResults();
// SAfe: Don't know if this is ok, but it's here
clearWarnings();
updateCount = -1;
// Consume all outstanding results. Otherwise it will either deadlock or
// return results from the previous query.
skipToEnd(tds);
try
{
if (escapeProcessing)
{
sql = Tds.toNativeSql(sql, tds.getServerType());
}
tds.executeQuery(sql, this, timeout);
}
catch(java.io.IOException e)
{
throw new SQLException("Network error- " + e.getMessage());
}
catch(com.internetcds.jdbc.tds.TdsException e)
{
throw new SQLException("TDS error- " + e.getMessage());
}
return getMoreResults(tds);
} // execute()
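   // A sketch of the caller-side loop this method is designed for ("stmt" and
   // processRows are assumed names, not part of this class):
   //
   //    boolean isResultSet = stmt.execute(sql);
   //    while (true)
   //    {
   //       if (isResultSet)
   //          processRows(stmt.getResultSet());   // read the current ResultSet
   //       else if (stmt.getUpdateCount() == -1)
   //          break;                              // neither rows nor a count: done
   //       isResultSet = stmt.getMoreResults();
   //    }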
/**
* getResultSet returns the current result as a ResultSet. It
* should only be called once per result.
*
* @return the current result set; null if there are no more
* @exception SQLException if a database access error occurs
*/
public java.sql.ResultSet getResultSet() throws SQLException
{
try
{
if (actTds == null)
{
return null;
}
// There is no unopened result, we are just doing that
unopenedResult = false;
closeResults();
if (actTds.peek()==TdsDefinitions.TDS_DONEINPROC)
{
PacketResult tmp = actTds.processSubPacket();
}
if (actTds.isResultSet()) // JJ 1999-01-09 used be: ;getMoreResults())
{
startResultSet(actTds);
}
else if( actTds.isErrorPacket() )
{
throw ((PacketErrorResult)actTds.processSubPacket()).getMsg().toSQLException();
}
else if (updateCount!=-1)
{
if (! actTds.isEndOfResults())
{
// XXX
throw new SQLException("Internal error. "+
" expected EndOfResults, found 0x"
+ Integer.toHexString(actTds.peek()&0xff));
}
/* curt's version:
boolean done = false;
while (!done && tds.isEndOfResults())
{
PacketEndTokenResult end =
(PacketEndTokenResult) actTds.processSubPacket();
updateCount = end.getRowCount();
done = !end.moreResults();
}
results = null;
*/
PacketEndTokenResult end =
(PacketEndTokenResult) actTds.processSubPacket();
updateCount = end.getRowCount();
results = null;
}
else
{
// We didn't have more data and we didn't have an update count,
// now what?
throw new SQLException("Internal error. Confused");
}
}
catch(java.io.IOException e)
{
throw new SQLException(e.getMessage());
}
catch(TdsException e)
{
throw new SQLException(e.getMessage());
}
return results;
}
/**
* getUpdateCount returns the current result as an update count,
* if the result is a ResultSet or there are no more results, -1
* is returned. It should only be called once per result.
*
* @return the current result as an update count.
* @exception SQLException if a database access error occurs
*/
public int getUpdateCount() throws SQLException
{
// if (updateCount == -1)
// {
// throw new SQLException("Don't have a count yet.");
// }
// XXX This isn't correct. We need to check to see if
// the result was a result set or if there are no more results.
// If either of those are true we are supposed to return -1
return updateCount;
}
/**
* getMoreResults moves to a Statement's next result. If it returns
    * true, this result is a ResultSet.
*
* @return true if the next ResultSet is valid
* @exception SQLException if a database access error occurs
*/
public boolean getMoreResults() throws SQLException
{
return getMoreResults(actTds);
}
public void handleRetStat(PacketRetStatResult packet) {
}
public void handleParamResult(PacketOutputParamResult packet) throws SQLException
{
}
public boolean getMoreResults(Tds tds) throws SQLException
{
return getMoreResultsImpl(tds, true);
}
private boolean getMoreResultsImpl(Tds tds, boolean allowTdsRelease) throws SQLException
{
updateCount = -1; // Do we need this global variable?
if (tds == null)
{
return false;
}
// Reset all internal variables (do it before checking for more results)
closeResults();
if( !tds.moreResults() )
{
if( allowTdsRelease )
releaseTds();
return false;
}
boolean result = false;
try
{
// Keep eating garbage and warnings until we reach the next result
while (true)
{
if (tds.isProcId())
{
tds.processSubPacket();
}
/* isParamToken handled by processSubPacket
else if (tds.isTextUpdate())
{
PacketResult tmp1 =
(PacketResult)tds.processSubPacket();
}
*/
else if (tds.isMessagePacket() || tds.isErrorPacket())
{
PacketMsgResult tmp = (PacketMsgResult)tds.processSubPacket();
warningChain.addOrReturn(tmp);
}
else if (tds.isRetStat()) {
handleRetStat((PacketRetStatResult)tds.processSubPacket());
}
else if (tds.isResultSet()) {
result = true;
unopenedResult = true;
break;
}
// SAfe: Only TDS_END_TOKEN should return row counts for Statements
// TDS_DONEINPROC should return row counts for PreparedStatements
else if( tds.peek()==Tds.TDS_END_TOKEN ||
(tds.getStatement() instanceof PreparedStatement &&
!(tds.getStatement() instanceof CallableStatement) &&
tds.peek()==Tds.TDS_DONEINPROC) )
{
PacketEndTokenResult end =
(PacketEndTokenResult)tds.processSubPacket();
updateCount = end.getRowCount();
if( allowTdsRelease )
releaseTds();
break;
}
// SAfe: TDS_DONEPROC and TDS_DONEINPROC should *NOT* return rowcounts
else if (tds.isEndOfResults())
{
PacketEndTokenResult end =
(PacketEndTokenResult)tds.processSubPacket();
if( !tds.moreResults() )
{
if( allowTdsRelease )
releaseTds();
break; // No more results but no update count either
}
}
else if( tds.isParamResult() )
handleParamResult((PacketOutputParamResult)tds.processSubPacket());
else if( tds.isEnvChange() )
// Process the environment change.
tds.processSubPacket();
else // process whatever comes now, isParamResult
throw new SQLException("Protocol confusion. "
+ "Got a 0x"
+ Integer.toHexString((tds.peek() & 0xff))
+ " packet");
} // end while
SQLException exception = warningChain.getExceptions();
if( exception != null )
{
try
{
/** @todo: This crashes if any rows are returned because no row context is provided */
if (result)
tds.discardResultSet(null);
releaseTds();
}
catch(java.io.IOException e)
{
throw new SQLException("Error discarding result set while processing sql error- " +
exception.getMessage() +
"\nIOException was " +
e.getMessage());
}
catch(com.internetcds.jdbc.tds.TdsException e)
{
throw new SQLException("Error discarding result set while processing sql error- " +
exception.getMessage() +
"\nIOException was " +
e.getMessage());
}
throw exception;
}
return result;
}
catch(java.io.IOException e)
{
releaseTds();
throw new SQLException("Network error- " + e.getMessage());
}
catch(com.internetcds.jdbc.tds.TdsException e)
{
releaseTds();
throw new SQLException("TDS error- " + e.getMessage());
}
}
protected void startResultSet(Tds tds)
throws SQLException
{
Columns names = null;
Columns info = null;
try
{
// We are just opening the next result set
unopenedResult = false;
while (!tds.isResultRow() && !tds.isEndOfResults())
{
PacketResult tmp = tds.processSubPacket();
if (tmp.getPacketType() == TdsDefinitions.TDS_DONEINPROC)
{
// XXX We should do something with the possible ret_stat
}
else if (tmp instanceof PacketColumnNamesResult)
{
names = ((PacketColumnNamesResult)tmp).getColumnNames();
}
else if (tmp instanceof PacketColumnInfoResult)
{
info = ((PacketColumnInfoResult)tmp).getColumnInfo();
}
else if (tmp instanceof PacketColumnOrderResult)
{
// nop
// XXX do we want to do anything with this
}
else if (tmp instanceof PacketTabNameResult)
{
// nop
// XXX What should be done with this information?
}
else if (tmp instanceof PacketControlResult)
{
// nop
// XXX do we want to do anything with this
}
else if (tmp instanceof PacketMsgResult)
{
warningChain.addOrReturn((PacketMsgResult)tmp);
}
else if (tmp instanceof PacketUnknown)
{
// XXX Need to add to the warning chain
}
else
{
throw new SQLException("Trying to get a result set. Found a "
+ tmp.getClass().getName());
}
}
warningChain.checkForExceptions();
if( !tds.isResultRow() && !tds.isEndOfResults() )
{
// XXX
throw new SQLException("Confused. Was expecting a result row. "
+ "Got a 0x" + Integer.toHexString(tds.peek() & 0xff));
}
// TDS 7.0 includes everything in one subpacket.
if (info != null)
names.merge(info);
results = new TdsResultSet( tds, this, names );
}
catch(com.internetcds.jdbc.tds.TdsException e)
{
e.printStackTrace();
throw new SQLException(e.getMessage());
}
catch( java.io.IOException e)
{
e.printStackTrace();
throw new SQLException(e.getMessage());
}
}
void fetchIntoCache() throws SQLException
{
if (results != null) {
// System.out.println("fetching into Cache !!");
results.fetchIntoCache();
}
}
//--------------------------JDBC 2.0-----------------------------
/**
* JDBC 2.0
*
* Gives the driver a hint as to the direction in which
* the rows in a result set
* will be processed. The hint applies only to result sets created
* using this Statement object. The default value is
* ResultSet.FETCH_FORWARD.
* <p>Note that this method sets the default fetch direction for
* result sets generated by this <code>Statement</code> object.
* Each result set has its own methods for getting and setting
* its own fetch direction.
* @param direction the initial direction for processing rows
* @exception SQLException if a database access error occurs
* or the given direction
* is not one of ResultSet.FETCH_FORWARD, ResultSet.FETCH_REVERSE, or
* ResultSet.FETCH_UNKNOWN
*/
public void setFetchDirection(int direction) throws SQLException
{
NotImplemented();
}
/**
* JDBC 2.0
*
* Retrieves the direction for fetching rows from
* database tables that is the default for result sets
* generated from this <code>Statement</code> object.
* If this <code>Statement</code> object has not set
* a fetch direction by calling the method <code>setFetchDirection</code>,
* the return value is implementation-specific.
*
* @return the default fetch direction for result sets generated
* from this <code>Statement</code> object
* @exception SQLException if a database access error occurs
*/
public int getFetchDirection() throws SQLException
{
NotImplemented();
return 0;
}
/**
* JDBC 2.0
*
* Gives the JDBC driver a hint as to the number of rows that should
* be fetched from the database when more rows are needed. The number
* of rows specified affects only result sets created using this
* statement. If the value specified is zero, then the hint is ignored.
* The default value is zero.
*
* @param rows the number of rows to fetch
* @exception SQLException if a database access error occurs, or the
* condition 0 <= rows <= this.getMaxRows() is not satisfied.
*/
public void setFetchSize(int rows) throws SQLException
{
NotImplemented();
}
/**
* JDBC 2.0
*
* Retrieves the number of result set rows that is the default
* fetch size for result sets
* generated from this <code>Statement</code> object.
* If this <code>Statement</code> object has not set
* a fetch size by calling the method <code>setFetchSize</code>,
* the return value is implementation-specific.
* @return the default fetch size for result sets generated
* from this <code>Statement</code> object
* @exception SQLException if a database access error occurs
*/
public int getFetchSize() throws SQLException
{
NotImplemented();
return 0;
}
/**
* JDBC 2.0
*
* Retrieves the result set concurrency.
*/
public int getResultSetConcurrency() throws SQLException
{
return concurrency;
}
/**
* JDBC 2.0
*
* Determine the result set type.
*/
public int getResultSetType() throws SQLException
{
return type;
}
/**
* JDBC 2.0
*
    * Adds a SQL command to the current batch of commands for the statement.
* This method is optional.
*
* @param sql typically this is a static SQL INSERT or UPDATE statement
* @exception SQLException if a database access error occurs, or the
* driver does not support batch statements
*/
public void addBatch( String sql ) throws SQLException
{
NotImplemented();
}
/**
* JDBC 2.0
*
* Makes the set of commands in the current batch empty.
* This method is optional.
*
* @exception SQLException if a database access error occurs or the
* driver does not support batch statements
*/
public void clearBatch() throws SQLException
{
NotImplemented();
}
/**
* JDBC 2.0
*
* Submits a batch of commands to the database for execution.
* This method is optional.
*
* @return an array of update counts containing one element for each
* command in the batch. The array is ordered according
* to the order in which commands were inserted into the batch.
* @exception SQLException if a database access error occurs or the
* driver does not support batch statements
*/
public int[] executeBatch() throws SQLException
{
NotImplemented();
return null;
}
/**
* JDBC 2.0
*
* Returns the <code>Connection</code> object
* that produced this <code>Statement</code> object.
* @return the connection that produced this statement
* @exception SQLException if a database access error occurs
*/
public java.sql.Connection getConnection() throws SQLException
{
return connection;
}
protected void changeSettings(boolean autoCommit, int transactionIsolationLevel) throws SQLException
{
if( actTds != null )
{
String query = actTds.sqlStatementForSettings(autoCommit, transactionIsolationLevel);
if( query != null )
execute(actTds, query);
}
}
static public void main(String args[])
throws java.lang.ClassNotFoundException,
java.lang.IllegalAccessException,
java.lang.InstantiationException,
SQLException
{
String query = null;
      String url = ""
+ "jdbc:freetds:"
+ "//"
+ "kap"
+ "/"
+ "pubs";
Class.forName("com.internetcds.jdbc.tds.Driver").newInstance();
java.sql.Connection connection;
connection = DriverManager.getConnection(url,
"testuser",
"password");
java.sql.Statement stmt = connection.createStatement();
query = ""
+ "update titles "
+ " set price=price+1.00 "
+ " where title_id='MC3021' or title_id = 'BU1032' ";
int count = stmt.executeUpdate(query);
System.out.println("Updated " + count + " rows.");
query =
""
+"select price, title_id, title, price*ytd_sales gross from titles"
+" where title like 'The%'";
java.sql.ResultSet rs = stmt.executeQuery(query);
while(rs.next())
{
float price = rs.getFloat("price");
if (rs.wasNull())
{
System.out.println("price: null");
}
else
{
System.out.println("price: " + price);
}
String title_id = rs.getString("title_id");
String title = rs.getString("title");
float gross = rs.getFloat("gross");
System.out.println("id: " + title_id);
System.out.println("name: " + title);
System.out.println("gross: " + gross);
System.out.println("");
}
}
}
| src.old/main/com/internetcds/jdbc/tds/TdsStatement.java | //
// Copyright 1998 CDS Networks, Inc., Medford Oregon
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// 3. All advertising materials mentioning features or use of this software
// must display the following acknowledgement:
// This product includes software developed by CDS Networks, Inc.
// 4. The name of CDS Networks, Inc. may not be used to endorse or promote
// products derived from this software without specific prior
// written permission.
//
// THIS SOFTWARE IS PROVIDED BY CDS NETWORKS, INC. ``AS IS'' AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL CDS NETWORKS, INC. BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
// OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
// OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
// SUCH DAMAGE.
//
/**
* A Statement object is used for executing a static SQL statement and
* obtaining the results produced by it.
*
* <p>Only one ResultSet per Statement can be open at any point in time.
* Therefore, if the reading of one ResultSet is interleaved with the
* reading of another, each must have been generated by different
* Statements. All statement execute methods implicitly close a
* statement's current ResultSet if an open one exists.
*
* @see java.sql.Statement
* @see ResultSet
* @version $Id: TdsStatement.java,v 1.18 2002-08-28 07:44:24 alin_sinpalean Exp $
*/
package com.internetcds.jdbc.tds;
import java.sql.*;
public class TdsStatement implements java.sql.Statement
{
public static final String cvsVersion = "$Id: TdsStatement.java,v 1.18 2002-08-28 07:44:24 alin_sinpalean Exp $";
protected TdsConnection connection; // The connection who created us
// ResultSet currentResults = null; // The current results
protected SQLWarningChain warningChain; // The warnings chain.
protected int timeout = 0; // The timeout for a query
protected TdsResultSet results = null;
protected Tds actTds = null;
private boolean escapeProcessing = true;
protected int updateCount = -1;
   private int maxFieldSize = (1<<31)-1; // evaluates to Integer.MAX_VALUE (int overflow wraps)
private int maxRows = 0;
/**
* Set when <code>getMoreResults</code> returns <code>true</code>. In this
* case we should know we have to open the next <code>ResultSet</code> (and
* close it) if the user doesn't do it. In a way, it marks that the current
* result is the next <code>ResultSet</code>.
*/
private boolean unopenedResult = false;
private int type = ResultSet.TYPE_FORWARD_ONLY;
private int concurrency = ResultSet.CONCUR_READ_ONLY;
public TdsStatement( TdsConnection con, int type, int concurrency )
throws SQLException
{
this.connection = con;
this.warningChain = new SQLWarningChain();
this.type = type;
this.concurrency = concurrency;
}
/**
* Constructor for a Statement. It simply sets the connection
* that created us.
*
    * @param con the Connection instance that created us
*/
public TdsStatement( TdsConnection con )
throws SQLException
{
this( con, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY );
}
protected void eofResults() throws SQLException
{
releaseTds();
}
protected void releaseTds() throws SQLException
{
// If the connection is not in autocommit mode, don't free the Tds.
// If we do that, we won't be able to commit/rollback the statement.
if( actTds==null || !connection.getAutoCommit() )
return;
// Don't free the Tds if there are any results left.
/** @todo Check if this is correct in case an IOException occurs */
if( actTds.moreResults() )
return;
try
{
connection.freeTds(actTds);
unopenedResult = false;
actTds = null;
}
catch (TdsException e)
{
throw new SQLException("confusion in freeing Tds " + e);
}
}
private void NotImplemented() throws java.sql.SQLException
{
throw new SQLException("Not Implemented");
}
protected void finalize()
throws Throwable
{
super.finalize();
if (actTds != null)
{
close();
}
}
/**
* Execute an SQL statement that returns a single <code>ResultSet</code>.
*
* @param sql typically a static SQL SELECT statement
* @return a <code>ResultSet</code> that contains the data produced by
* the query; never <code>null</code>
* @exception SQLException if a database access error occurs
*/
public ResultSet executeQuery(String sql) throws SQLException
{
if( type==ResultSet.TYPE_FORWARD_ONLY &&
concurrency==ResultSet.CONCUR_READ_ONLY )
{
ResultSet rs = internalExecuteQuery( sql );
if( rs != null )
return rs;
else
throw new SQLException("No ResultSet was produced.");
}
else
return new freetds.CursorResultSet(this, sql);
}
final public TdsResultSet internalExecuteQuery(String sql) throws SQLException
{
return internalExecuteQuery( getTds(sql), sql );
}
/**
* This is the internal function that all subclasses should call.
* It is not executeQuery() to allow subclasses (in particular
* CursorResultSet) to override that functionality without
* breaking the internal methods.
*/
final public TdsResultSet internalExecuteQuery(Tds tds, String sql)
throws SQLException
{
System.out.println(this+" >>> "+tds+" >>> "+tds.comm+" >>> "+sql);
if (execute(tds, sql))
{
startResultSet(tds);
}
return results;
}
/**
* Execute a SQL INSERT, UPDATE or DELETE statement. In addition
* SQL statements that return nothing such as SQL DDL statements
* can be executed
*
* Any IDs generated for AUTO_INCREMENT fields can be retrieved
* by looking through the SQLWarning chain of this statement
* for warnings of the form "LAST_INSERTED_ID = 'some number',
* COMMAND = 'your sql'".
*
    * @param sql a SQL statement
* @return either a row count, or 0 for SQL commands
* @exception SQLException if a database access error occurs
*/
public int executeUpdate(String sql) throws SQLException
{
if (execute(sql))
{
startResultSet(actTds);
closeResults();
throw new SQLException("executeUpdate can't return a result set");
}
else
{
int res = getUpdateCount();
// We should return 0 (at least that's what the javadoc above says)
return res==-1 ? 0 : res;
}
}
protected void closeResults()
throws java.sql.SQLException
{
// If we have a ResultSet that was not opened by the user although a call
// to getMoreResults was made, we should skip it, so open it.
if( unopenedResult )
startResultSet(actTds);
if (results != null)
{
results.close();
results = null;
}
}
/**
* Eats all available input from the server. Not very efficient (since it
* reads in all data by creating <code>ResultSets</code> and processing
* them), but at least it works (the old version would crash when reading in
* a row because it didn't have any information about the row's Context).
* <p>
* This could be changed to use the <code>TdsComm</code> to read in all the
* server response without processing it, but that requires some changes in
* <code>TdsComm</code>, too.
*/
private void skipToEnd(Tds tds)
throws java.sql.SQLException
{
while( getMoreResultsImpl(tds, false) || updateCount!=-1 );
}
public void commit()
throws java.sql.SQLException, java.io.IOException, com.internetcds.jdbc.tds.TdsUnknownPacketSubType, com.internetcds.jdbc.tds.TdsException
{
String sql = "IF @@TRANCOUNT > 0 COMMIT TRAN ";
if( actTds == null )
{
throw new SQLException("Statement is closed");
}
internalExecuteQuery(actTds,sql);
skipToEnd(actTds);
actTds.commit();
connection.freeTds(actTds);
actTds = null;
}
public void rollback()
throws java.sql.SQLException, java.io.IOException, com.internetcds.jdbc.tds.TdsUnknownPacketSubType, com.internetcds.jdbc.tds.TdsException
{
String sql = "IF @@TRANCOUNT > 0 ROLLBACK TRAN ";
if( actTds == null )
{
throw new SQLException("Statement is closed");
}
internalExecuteQuery(actTds,sql);
skipToEnd(actTds);
if( !actTds.autoCommit )
actTds.rollback();
else
actTds.commit();
connection.freeTds(actTds);
actTds = null;
}
/**
* In many cases, it is desirable to immediately release a
* Statement's database and JDBC resources instead of waiting
* for this to happen when it is automatically closed. The
* close method provides this immediate release.
*
* <p><B>Note:</B> A Statement is automatically closed when it is
* garbage collected. When a Statement is closed, its current
* ResultSet, if one exists, is also closed.
*
* @exception SQLException if a database access error occurs (why?)
*/
public void close() throws SQLException
{
// Already closed.
if( actTds == null )
return;
closeResults();
// now we need to relinquish the connection
if( actTds != null )
{
// Rollback any pending transactions
try
{
rollback();
}
catch (com.internetcds.jdbc.tds.TdsUnknownPacketSubType e)
{
throw new SQLException("Unknown packet. \n" + e.getMessage());
}
catch (com.internetcds.jdbc.tds.TdsException e)
{
// XXX
// ignore this for now
}
catch (java.io.IOException e)
{
// XXX
// ignore this for now
}
}
try
{
((ConnectionHelper)connection).markAsClosed(this);
}
catch(TdsException e)
{
// System.out.println("XXX: " + e.getMessage());
// throw new SQLException(e.getMessage());
// already closed by connection close ??
}
}
/**
* The maxFieldSize limit (in bytes) is the maximum amount of
* data returned for any column value; it only applies to
* BINARY, VARBINARY, LONGVARBINARY, CHAR, VARCHAR and LONGVARCHAR
* columns. If the limit is exceeded, the excess data is silently
* discarded.
*
* @return the current max column size limit; zero means unlimited
* @exception SQLException if a database access error occurs
*/
public int getMaxFieldSize() throws SQLException
{
return maxFieldSize;
}
/**
* Sets the maxFieldSize
*
* @param max the new max column size limit; zero means unlimited
* @exception SQLException if size exceeds buffer size
*/
public void setMaxFieldSize(int max) throws SQLException
{
maxFieldSize = max;
}
/**
* The maxRows limit is set to limit the number of rows that
* any ResultSet can contain. If the limit is exceeded, the
* excess rows are silently dropped.
*
* @return the current maximum row limit; zero means unlimited
* @exception SQLException if a database access error occurs
*/
public int getMaxRows() throws SQLException
{
return maxRows;
}
/**
* Set the maximum number of rows
*
* @param max the new max rows limit; zero means unlimited
* @exception SQLException if a database access error occurs
* @see #getMaxRows
*/
public void setMaxRows(int max) throws SQLException
{
      if (max < 0)
{
throw new SQLException("Negative row count");
}
maxRows = max;
// this.executeUpdate("set rowcount " + maxRows);
}
/**
* If escape scanning is on (the default), the driver will do escape
* substitution before sending the SQL to the database.
*
* @param enable true to enable; false to disable
* @exception SQLException if a database access error occurs
*/
public void setEscapeProcessing(boolean enable) throws SQLException
{
escapeProcessing = enable;
}
/**
* The queryTimeout limit is the number of seconds the driver
* will wait for a Statement to execute. If the limit is
* exceeded, a SQLException is thrown.
*
* @return the current query timeout limit in seconds; 0 = unlimited
* @exception SQLException if a database access error occurs
*/
public int getQueryTimeout() throws SQLException
{
return timeout;
}
/**
* Sets the queryTimeout limit
*
* @param seconds - the new query timeout limit in seconds
* @exception SQLException if a database access error occurs
*/
public void setQueryTimeout(int seconds) throws SQLException
{
timeout = seconds;
}
/**
*
* @exception SQLException
*/
public void cancel() throws SQLException
{
if (actTds == null)
{
throw new SQLException("Statement is closed");
}
try
{
actTds.cancel();
}
catch(com.internetcds.jdbc.tds.TdsException e)
{
throw new SQLException(e.getMessage());
}
catch(java.io.IOException e)
{
throw new SQLException(e.getMessage());
}
}
/**
* The first warning reported by calls on this Statement is
* returned. A Statement's execute methods clear its SQLWarning
* chain. Subsequent Statement warnings will be chained to this
* SQLWarning.
*
* <p>The Warning chain is automatically cleared each time a statement
* is (re)executed.
*
* <p><B>Note:</B> If you are processing a ResultSet then any warnings
* associated with ResultSet reads will be chained on the ResultSet
* object.
*
    * @return the first SQLWarning or null
* @exception SQLException if a database access error occurs
*/
public SQLWarning getWarnings() throws SQLException
{
return warningChain.getWarnings();
}
/**
* After this call, getWarnings returns null until a new warning
* is reported for this Statement.
*
* @exception SQLException if a database access error occurs (why?)
*/
public void clearWarnings() throws SQLException
{
warningChain.clearWarnings();
}
/**
* setCursorName defines the SQL cursor name that will be used by
* subsequent execute methods. This name can then be used in SQL
* positioned update/delete statements to identify the current row
* in the ResultSet generated by this statement. If a database
* doesn't support positioned update/delete, this method is a
* no-op.
*
*
* @param name the new cursor name
* @exception SQLException if a database access error occurs
*/
public void setCursorName(String name) throws SQLException
{
NotImplemented();
}
public boolean execute(String sql) throws SQLException
{
return execute(getTds(sql),sql);
}
protected Tds getTds(String sql) throws SQLException
{
if( actTds != null )
return actTds;
else
{
actTds=connection.allocateTds();
actTds.setStatement(this);
return actTds;
}
}
/**
* @param sql any SQL statement
    * @return true if the next result is a ResultSet, false if it is
* an update count or there are no more results
* @exception SQLException if a database access error occurs
*/
public boolean execute(Tds tds, String sql) throws SQLException
{
// Clear warnings before closing results, otherwise the last exception will be thrown.
clearWarnings();
closeResults();
// SAfe: Don't know if this is ok, but it's here
clearWarnings();
updateCount = -1;
// Consume all outstanding results. Otherwise it will either deadlock or
// return results from the previous query.
skipToEnd(tds);
try
{
if (escapeProcessing)
{
sql = Tds.toNativeSql(sql, tds.getServerType());
}
tds.executeQuery(sql, this, timeout);
}
catch(java.io.IOException e)
{
throw new SQLException("Network error- " + e.getMessage());
}
catch(com.internetcds.jdbc.tds.TdsException e)
{
throw new SQLException("TDS error- " + e.getMessage());
}
return getMoreResults(tds);
} // execute()
/**
* getResultSet returns the current result as a ResultSet. It
* should only be called once per result.
*
* @return the current result set; null if there are no more
* @exception SQLException if a database access error occurs
*/
public java.sql.ResultSet getResultSet() throws SQLException
{
try
{
if (actTds == null)
{
return null;
}
         // The pending result is being opened right now, so clear the flag
unopenedResult = false;
closeResults();
if (actTds.peek()==TdsDefinitions.TDS_DONEINPROC)
{
PacketResult tmp = actTds.processSubPacket();
}
if (actTds.isResultSet()) // JJ 1999-01-09 used be: ;getMoreResults())
{
startResultSet(actTds);
}
else if( actTds.isErrorPacket() )
{
throw ((PacketErrorResult)actTds.processSubPacket()).getMsg().toSQLException();
}
else if (updateCount!=-1)
{
if (! actTds.isEndOfResults())
{
// XXX
throw new SQLException("Internal error. "+
" expected EndOfResults, found 0x"
+ Integer.toHexString(actTds.peek()&0xff));
}
/* curt's version:
boolean done = false;
while (!done && tds.isEndOfResults())
{
PacketEndTokenResult end =
(PacketEndTokenResult) actTds.processSubPacket();
updateCount = end.getRowCount();
done = !end.moreResults();
}
results = null;
*/
PacketEndTokenResult end =
(PacketEndTokenResult) actTds.processSubPacket();
updateCount = end.getRowCount();
results = null;
}
else
{
// We didn't have more data and we didn't have an update count,
// now what?
throw new SQLException("Internal error. Confused");
}
}
catch(java.io.IOException e)
{
throw new SQLException(e.getMessage());
}
catch(TdsException e)
{
throw new SQLException(e.getMessage());
}
return results;
}
/**
* getUpdateCount returns the current result as an update count,
* if the result is a ResultSet or there are no more results, -1
* is returned. It should only be called once per result.
*
* @return the current result as an update count.
* @exception SQLException if a database access error occurs
*/
public int getUpdateCount() throws SQLException
{
// if (updateCount == -1)
// {
// throw new SQLException("Don't have a count yet.");
// }
// XXX This isn't correct. We need to check to see if
// the result was a result set or if there are no more results.
// If either of those are true we are supposed to return -1
return updateCount;
}
/**
* getMoreResults moves to a Statement's next result. If it returns
    * true, this result is a ResultSet.
*
* @return true if the next ResultSet is valid
* @exception SQLException if a database access error occurs
*/
public boolean getMoreResults() throws SQLException
{
return getMoreResults(actTds);
}
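   /**
    * Illustrative sketch, not part of the original source: the standard JDBC
    * pattern for draining every result a statement produces, built only on
    * the getMoreResults()/getUpdateCount() contract documented above.
    */
   public static void drainAllResults(java.sql.Statement stmt, String sql)
      throws SQLException
   {
      boolean isResultSet = stmt.execute(sql);
      while (true)
      {
         if (isResultSet)
         {
            java.sql.ResultSet rs = stmt.getResultSet();
            while (rs.next())
            {
               // consume the current row here
            }
            rs.close();
         }
         else if (stmt.getUpdateCount() == -1)
         {
            break; // neither a result set nor an update count is left
         }
         isResultSet = stmt.getMoreResults();
      }
   }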
public void handleRetStat(PacketRetStatResult packet) {
}
public void handleParamResult(PacketOutputParamResult packet) throws SQLException
{
}
public boolean getMoreResults(Tds tds) throws SQLException
{
return getMoreResultsImpl(tds, true);
}
private boolean getMoreResultsImpl(Tds tds, boolean allowTdsRelease) throws SQLException
{
updateCount = -1; // Do we need this global variable?
if (tds == null)
{
return false;
}
// Reset all internal variables (do it before checking for more results)
closeResults();
if( !tds.moreResults() )
{
if( allowTdsRelease )
releaseTds();
return false;
}
boolean result = false;
try
{
// Keep eating garbage and warnings until we reach the next result
while (true)
{
if (tds.isProcId())
{
tds.processSubPacket();
}
/* isParamToken handled by processSubPacket
else if (tds.isTextUpdate())
{
PacketResult tmp1 =
(PacketResult)tds.processSubPacket();
}
*/
else if (tds.isMessagePacket() || tds.isErrorPacket())
{
PacketMsgResult tmp = (PacketMsgResult)tds.processSubPacket();
warningChain.addOrReturn(tmp);
}
else if (tds.isRetStat()) {
handleRetStat((PacketRetStatResult)tds.processSubPacket());
}
else if (tds.isResultSet()) {
result = true;
unopenedResult = true;
break;
}
// SAfe: Only TDS_END_TOKEN should return row counts for Statements
// TDS_DONEINPROC should return row counts for PreparedStatements
else if( tds.peek()==Tds.TDS_END_TOKEN ||
(tds.getStatement() instanceof PreparedStatement &&
!(tds.getStatement() instanceof CallableStatement) &&
tds.peek()==Tds.TDS_DONEINPROC) )
{
PacketEndTokenResult end =
(PacketEndTokenResult)tds.processSubPacket();
updateCount = end.getRowCount();
if( allowTdsRelease )
releaseTds();
break;
}
// SAfe: TDS_DONEPROC and TDS_DONEINPROC should *NOT* return rowcounts
else if (tds.isEndOfResults())
{
PacketEndTokenResult end =
(PacketEndTokenResult)tds.processSubPacket();
if( !tds.moreResults() )
{
if( allowTdsRelease )
releaseTds();
break; // No more results but no update count either
}
}
else if( tds.isParamResult() )
handleParamResult((PacketOutputParamResult)tds.processSubPacket());
else if( tds.isEnvChange() )
// Process the environment change.
tds.processSubPacket();
else // process whatever comes now, isParamResult
throw new SQLException("Protocol confusion. "
+ "Got a 0x"
+ Integer.toHexString((tds.peek() & 0xff))
+ " packet");
} // end while
SQLException exception = warningChain.getExceptions();
if( exception != null )
{
try
{
/** @todo: This crashes if any rows are returned because no row context is provided */
if (result)
tds.discardResultSet(null);
releaseTds();
}
catch(java.io.IOException e)
{
throw new SQLException("Error discarding result set while processing sql error- " +
exception.getMessage() +
"\nIOException was " +
e.getMessage());
}
catch(com.internetcds.jdbc.tds.TdsException e)
{
throw new SQLException("Error discarding result set while processing sql error- " +
exception.getMessage() +
"\nIOException was " +
e.getMessage());
}
throw exception;
}
return result;
}
catch(java.io.IOException e)
{
releaseTds();
throw new SQLException("Network error- " + e.getMessage());
}
catch(com.internetcds.jdbc.tds.TdsException e)
{
releaseTds();
throw new SQLException("TDS error- " + e.getMessage());
}
}
protected void startResultSet(Tds tds)
throws SQLException
{
Columns names = null;
Columns info = null;
try
{
// We are just opening the next result set
unopenedResult = false;
while (!tds.isResultRow() && !tds.isEndOfResults())
{
PacketResult tmp = tds.processSubPacket();
if (tmp.getPacketType() == TdsDefinitions.TDS_DONEINPROC)
{
// XXX We should do something with the possible ret_stat
}
else if (tmp instanceof PacketColumnNamesResult)
{
names = ((PacketColumnNamesResult)tmp).getColumnNames();
}
else if (tmp instanceof PacketColumnInfoResult)
{
info = ((PacketColumnInfoResult)tmp).getColumnInfo();
}
else if (tmp instanceof PacketColumnOrderResult)
{
// nop
// XXX do we want to do anything with this
}
else if (tmp instanceof PacketTabNameResult)
{
// nop
// XXX What should be done with this information?
}
else if (tmp instanceof PacketControlResult)
{
// nop
// XXX do we want to do anything with this
}
else if (tmp instanceof PacketMsgResult)
{
warningChain.addOrReturn((PacketMsgResult)tmp);
}
else if (tmp instanceof PacketUnknown)
{
// XXX Need to add to the warning chain
}
else
{
throw new SQLException("Trying to get a result set. Found a "
+ tmp.getClass().getName());
}
}
warningChain.checkForExceptions();
if( !tds.isResultRow() && !tds.isEndOfResults() )
{
// XXX
throw new SQLException("Confused. Was expecting a result row. "
+ "Got a 0x" + Integer.toHexString(tds.peek() & 0xff));
}
// TDS 7.0 includes everything in one subpacket.
if (info != null)
names.merge(info);
results = new TdsResultSet( tds, this, names );
}
catch(com.internetcds.jdbc.tds.TdsException e)
{
e.printStackTrace();
throw new SQLException(e.getMessage());
}
catch( java.io.IOException e)
{
e.printStackTrace();
throw new SQLException(e.getMessage());
}
}
void fetchIntoCache() throws SQLException
{
if (results != null) {
// System.out.println("fetching into Cache !!");
results.fetchIntoCache();
}
}
//--------------------------JDBC 2.0-----------------------------
/**
* JDBC 2.0
*
* Gives the driver a hint as to the direction in which
* the rows in a result set
* will be processed. The hint applies only to result sets created
* using this Statement object. The default value is
* ResultSet.FETCH_FORWARD.
* <p>Note that this method sets the default fetch direction for
* result sets generated by this <code>Statement</code> object.
* Each result set has its own methods for getting and setting
* its own fetch direction.
* @param direction the initial direction for processing rows
* @exception SQLException if a database access error occurs
* or the given direction
* is not one of ResultSet.FETCH_FORWARD, ResultSet.FETCH_REVERSE, or
* ResultSet.FETCH_UNKNOWN
*/
public void setFetchDirection(int direction) throws SQLException
{
NotImplemented();
}
/**
* JDBC 2.0
*
* Retrieves the direction for fetching rows from
* database tables that is the default for result sets
* generated from this <code>Statement</code> object.
* If this <code>Statement</code> object has not set
* a fetch direction by calling the method <code>setFetchDirection</code>,
* the return value is implementation-specific.
*
* @return the default fetch direction for result sets generated
* from this <code>Statement</code> object
* @exception SQLException if a database access error occurs
*/
public int getFetchDirection() throws SQLException
{
NotImplemented();
return 0;
}
/**
* JDBC 2.0
*
* Gives the JDBC driver a hint as to the number of rows that should
* be fetched from the database when more rows are needed. The number
* of rows specified affects only result sets created using this
* statement. If the value specified is zero, then the hint is ignored.
* The default value is zero.
*
* @param rows the number of rows to fetch
* @exception SQLException if a database access error occurs, or the
* condition 0 <= rows <= this.getMaxRows() is not satisfied.
*/
public void setFetchSize(int rows) throws SQLException
{
NotImplemented();
}
/**
* JDBC 2.0
*
* Retrieves the number of result set rows that is the default
* fetch size for result sets
* generated from this <code>Statement</code> object.
* If this <code>Statement</code> object has not set
* a fetch size by calling the method <code>setFetchSize</code>,
* the return value is implementation-specific.
* @return the default fetch size for result sets generated
* from this <code>Statement</code> object
* @exception SQLException if a database access error occurs
*/
public int getFetchSize() throws SQLException
{
NotImplemented();
return 0;
}
/**
* JDBC 2.0
*
* Retrieves the result set concurrency.
*/
public int getResultSetConcurrency() throws SQLException
{
return concurrency;
}
/**
* JDBC 2.0
*
* Determine the result set type.
*/
public int getResultSetType() throws SQLException
{
return type;
}
/**
* JDBC 2.0
*
     * Adds a SQL command to the current batch of commands for the statement.
* This method is optional.
*
* @param sql typically this is a static SQL INSERT or UPDATE statement
* @exception SQLException if a database access error occurs, or the
* driver does not support batch statements
*/
public void addBatch( String sql ) throws SQLException
{
NotImplemented();
}
/**
* JDBC 2.0
*
* Makes the set of commands in the current batch empty.
* This method is optional.
*
* @exception SQLException if a database access error occurs or the
* driver does not support batch statements
*/
public void clearBatch() throws SQLException
{
NotImplemented();
}
/**
* JDBC 2.0
*
* Submits a batch of commands to the database for execution.
* This method is optional.
*
* @return an array of update counts containing one element for each
* command in the batch. The array is ordered according
* to the order in which commands were inserted into the batch.
* @exception SQLException if a database access error occurs or the
* driver does not support batch statements
*/
public int[] executeBatch() throws SQLException
{
NotImplemented();
return null;
}
/**
* JDBC 2.0
*
* Returns the <code>Connection</code> object
* that produced this <code>Statement</code> object.
* @return the connection that produced this statement
* @exception SQLException if a database access error occurs
*/
public java.sql.Connection getConnection() throws SQLException
{
return connection;
}
protected void changeSettings(boolean autoCommit, int transactionIsolationLevel) throws SQLException
{
if( actTds != null )
{
String query = actTds.sqlStatementForSettings(autoCommit, transactionIsolationLevel);
if( query != null )
execute(actTds, query);
}
}
static public void main(String args[])
throws java.lang.ClassNotFoundException,
java.lang.IllegalAccessException,
java.lang.InstantiationException,
SQLException
{
String query = null;
      String url = ""
+ "jdbc:freetds:"
+ "//"
+ "kap"
+ "/"
+ "pubs";
Class.forName("com.internetcds.jdbc.tds.Driver").newInstance();
java.sql.Connection connection;
connection = DriverManager.getConnection(url,
"testuser",
"password");
java.sql.Statement stmt = connection.createStatement();
query = ""
+ "update titles "
+ " set price=price+1.00 "
+ " where title_id='MC3021' or title_id = 'BU1032' ";
int count = stmt.executeUpdate(query);
System.out.println("Updated " + count + " rows.");
query =
""
+"select price, title_id, title, price*ytd_sales gross from titles"
+" where title like 'The%'";
java.sql.ResultSet rs = stmt.executeQuery(query);
while(rs.next())
{
float price = rs.getFloat("price");
if (rs.wasNull())
{
System.out.println("price: null");
}
else
{
System.out.println("price: " + price);
}
String title_id = rs.getString("title_id");
String title = rs.getString("title");
float gross = rs.getFloat("gross");
System.out.println("id: " + title_id);
System.out.println("name: " + title);
System.out.println("gross: " + gross);
System.out.println("");
}
}
}
| Removed an overlooked debug message.
git-svn-id: 1fece92013051e73d27f0f6eaec2bf855927aa58@74 97a8069c-bbaa-4fa2-9e49-561d8e0b19ef
| src.old/main/com/internetcds/jdbc/tds/TdsStatement.java | Removed an overlooked debug message. | <ide><path>rc.old/main/com/internetcds/jdbc/tds/TdsStatement.java
<ide> *
<ide> * @see java.sql.Statement
<ide> * @see ResultSet
<del> * @version $Id: TdsStatement.java,v 1.18 2002-08-28 07:44:24 alin_sinpalean Exp $
<add> * @version $Id: TdsStatement.java,v 1.19 2002-08-28 08:38:54 alin_sinpalean Exp $
<ide> */
<ide> package com.internetcds.jdbc.tds;
<ide>
<ide>
<ide> public class TdsStatement implements java.sql.Statement
<ide> {
<del> public static final String cvsVersion = "$Id: TdsStatement.java,v 1.18 2002-08-28 07:44:24 alin_sinpalean Exp $";
<add> public static final String cvsVersion = "$Id: TdsStatement.java,v 1.19 2002-08-28 08:38:54 alin_sinpalean Exp $";
<ide>
<ide>
<ide> protected TdsConnection connection; // The connection who created us
<ide> final public TdsResultSet internalExecuteQuery(Tds tds, String sql)
<ide> throws SQLException
<ide> {
<del> System.out.println(this+" >>> "+tds+" >>> "+tds.comm+" >>> "+sql);
<ide> if (execute(tds, sql))
<ide> {
<ide> startResultSet(tds); |
|
Java | mit | error: pathspec 'src/android/at/casephonegap/rootfullscreen/rootFullScreen.java' did not match any file(s) known to git
| 57d32f26bd9ef2c38bef095b6accbdc45cbc931f | 1 | casephonegap/rootFullScreen,casephonegap/rootFullScreen | package at.casephonegap.rootfullscreen;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.CallbackContext;
import org.json.JSONArray;
import org.json.JSONException;
import android.view.WindowManager.LayoutParams;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Map;
import android.os.Build;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
import android.util.Log;
public class rootFullScreen extends CordovaPlugin
{ @Override
public boolean execute(String action, JSONArray args, CallbackContext callback) throws JSONException
{ // grab the correct methods
if (action.equalsIgnoreCase("enable"))
{ try
{ //REQUIRES ROOT
Build.VERSION_CODES vc = new Build.VERSION_CODES();
Build.VERSION vr = new Build.VERSION();
String ProcID = "79"; //HONEYCOMB AND OLDER
//v.RELEASE //4.0.3
if (VERSION.SDK_INT >= VERSION_CODES.ICE_CREAM_SANDWICH) { ProcID = "42"; /*ICS AND NEWER*/ }
Log.d("Fullscreen Plugin", "try hiding the system bar");
//REQUIRES ROOT
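				// Explanation added for clarity (inferred from this code, not from
				// verified Android documentation): "service call activity <N> s16
				// com.android.systemui" invokes binder transaction number N on the
				// activity service as root, passing the systemui package name as a
				// string argument; the matching transaction force-stops that package,
				// which hides the system bar. The transaction number shifted between
				// releases, hence 79 before Ice Cream Sandwich and 42 from ICS on.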
Process proc = Runtime.getRuntime().exec(new String[]{"su","-c","service call activity "+ ProcID +" s16 com.android.systemui"}); //WAS 79
proc.waitFor();
Log.d("Fullscreen Plugin", "systembar successfully hidden");
return true;
}
catch(Exception ex) { Log.d("Fullscreen Plugin", ex.getMessage()); }
/*cordova.getActivity().runOnUiThread( new Runnable()
{ public void run()
{ cordova.getActivity().getWindow().addFlags(LayoutParams.FLAG_KEEP_SCREEN_ON);
}
});
return true;*/
}
else if (action.equalsIgnoreCase("disable"))
{ try
{ Log.d("Fullscreen Plugin", "try showing the system bar");
//Process proc = Runtime.getRuntime().exec(new String[]{"am","startservice","-n","com.android.systemui/.SystemUIService"});
//proc.waitFor();
String command;
command = "LD_LIBRARY_PATH=/vendor/lib:/system/lib am startservice -n com.android.systemui/.SystemUIService";
Process proc = Runtime.getRuntime().exec(new String[] { "su", "-c", command });
proc.waitFor();
Log.d("Fullscreen Plugin", "systembar successfully showing");
return true;
}
catch(Exception ex) { Log.d("Fullscreen Plugin", ex.getMessage()); }
/*cordova.getActivity().runOnUiThread( new Runnable()
{ public void run()
{ cordova.getActivity().getWindow().clearFlags(LayoutParams.FLAG_KEEP_SCREEN_ON);
}
});
return true;*/
}
else
{ callback.error("Unknown Action: " + action);
return false;
		}
		// the root-shell branches above can fall through their catch blocks; report failure
		return false;
	}
}
 | src/android/at/casephonegap/rootfullscreen/rootFullScreen.java | java file added
 | src/android/at/casephonegap/rootfullscreen/rootFullScreen.java | java file added | <ide><path>rc/android/at/casephonegap/rootfullscreen/rootFullScreen.java
<add>package at.casephonegap.rootfullscreen;
<add>
<add>import org.apache.cordova.CordovaPlugin;
<add>import org.apache.cordova.CallbackContext;
<add>import org.json.JSONArray;
<add>import org.json.JSONException;
<add>
<add>import android.view.WindowManager.LayoutParams;
<add>
<add>
<add>import java.io.BufferedReader;
<add>import java.io.File;
<add>import java.io.InputStreamReader;
<add>import java.util.ArrayList;
<add>import java.util.Map;
<add>import android.os.Build;
<add>import android.os.Build.VERSION;
<add>import android.os.Build.VERSION_CODES;
<add>import android.util.Log;
<add>
<add>
<add>
<add>public class rootFullScreen extends CordovaPlugin
<add>{ @Override
<add> public boolean execute(String action, JSONArray args, CallbackContext callback) throws JSONException
<add> { // grab the correct methods
<add> if (action.equalsIgnoreCase("enable"))
<add> { try
<add> { //REQUIRES ROOT
<add> Build.VERSION_CODES vc = new Build.VERSION_CODES();
<add> Build.VERSION vr = new Build.VERSION();
<add> String ProcID = "79"; //HONEYCOMB AND OLDER
<add> //v.RELEASE //4.0.3
<add> if (VERSION.SDK_INT >= VERSION_CODES.ICE_CREAM_SANDWICH) { ProcID = "42"; /*ICS AND NEWER*/ }
<add> Log.d("Fullscreen Plugin", "try hiding the system bar");
<add> //REQUIRES ROOT
<add> Process proc = Runtime.getRuntime().exec(new String[]{"su","-c","service call activity "+ ProcID +" s16 com.android.systemui"}); //WAS 79
<add> proc.waitFor();
<add> Log.d("Fullscreen Plugin", "systembar successfully hidden");
<add> return true;
<add> }
<add> catch(Exception ex) { Log.d("Fullscreen Plugin", ex.getMessage()); }
<add>
<add> /*cordova.getActivity().runOnUiThread( new Runnable()
<add> { public void run()
<add> { cordova.getActivity().getWindow().addFlags(LayoutParams.FLAG_KEEP_SCREEN_ON);
<add> }
<add> });
<add> return true;*/
<add> }
<add> else if (action.equalsIgnoreCase("disable"))
<add> { try
<add> { Log.d("Fullscreen Plugin", "try showing the system bar");
<add> //Process proc = Runtime.getRuntime().exec(new String[]{"am","startservice","-n","com.android.systemui/.SystemUIService"});
<add> //proc.waitFor();
<add> String command;
<add> command = "LD_LIBRARY_PATH=/vendor/lib:/system/lib am startservice -n com.android.systemui/.SystemUIService";
<add> Process proc = Runtime.getRuntime().exec(new String[] { "su", "-c", command });
<add> proc.waitFor();
<add> Log.d("Fullscreen Plugin", "systembar successfully showing");
<add> return true;
<add> }
<add> catch(Exception ex) { Log.d("Fullscreen Plugin", ex.getMessage()); }
<add>
<add> /*cordova.getActivity().runOnUiThread( new Runnable()
<add> { public void run()
<add> { cordova.getActivity().getWindow().clearFlags(LayoutParams.FLAG_KEEP_SCREEN_ON);
<add> }
<add> });
<add> return true;*/
<add> }
<add> else
<add> { callback.error("Unknown Action: " + action);
<add> return false;
<add> }
<add> }
<add>} |
|
JavaScript | mit | 5a223410af8aa8210358163311502afb52e88818 | 0 | qlayer/express-restify-mongoose,codydaig/express-restify-mongoose,pixel2/koa-restify-mongoose,pixel2/koa-restify-mongoose,xdotai/express-restify-mongoose,pedrosousabarreto/express-restify-mongoose,reimund/express-restify-mongoose,gregerolsson/express-restify-mongoose,DavidBLynch/express-restify-mongoose,DavidBLynch/express-restify-mongoose,Ensequence/express-restify-mongoose,florianholzapfel/express-restify-mongoose,summer-liu/express-restify-mongoose | /**
* express-restify-mongoose.js
*
* Copyright (C) 2013 by Florian Holzapfel
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the 'Software'), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
**/
var util = require('util'),
Filter = require('./resource_filter'),
permissions = require('./permissions'),
_ = require('lodash'),
inflection = require('inflection'),
http = require('http'),
customDefaults = null;
function getDefaults() {
var options = {
prefix: '/api',
version: '/v1',
private: false,
lean: true,
plural: true,
middleware: [],
strict: false,
findOneAndUpdate: true,
findOneAndRemove: true,
contextFilter: null,
postCreate: null
};
for (var prop in customDefaults) {
options[prop] = customDefaults[prop];
}
return options;
}
function outputExpress(res, result) {
res.type('json');
res.status(200).send(JSON.stringify(result));
}
function outputRestify(res, result) {
res.send(result);
}
var restify = function(app, model, opts) {
var postProcess, lean, filter, contextFilter, postCreate, postDelete,
usingExpress,
options = getDefaults(),
queryOptions = {
protected: ['skip', 'limit', 'sort', 'populate', 'select', 'lean',
'$and', '$or', 'query'],//H+ exposes OR, AND and WHERE methods
current: {}
};
for (var prop in opts) {
if (opts[prop] instanceof Array) {
options[prop] = [];
for (var index in opts[prop]) {
options[prop][index] = opts[prop][index];
}
} else {
options[prop] = opts[prop];
}
}
options.private = options.private ? options.private.split(',') : [];
options.protected = options.protected ?
options.protected.split(',') : [];
model.schema.eachPath(function(name, path) {
if (path.options.access) {
switch (path.options.access.toLowerCase()) {
case 'private':
options.private.push(name);
break;
case 'protected':
options.protected.push(name);
break;
}
}
});
if (options.exclude) {
options.private = options.exclude.split(',');
console.error('Exclude is deprecated. Use private instead');
}
lean = options.lean;
filter = new Filter(model, options.private, options.protected);
postProcess = options.postProcess || function() {
};
if (options.middleware) {
if (!(options.middleware instanceof Array)) {
var m = options.middleware;
options.middleware = [ m ];
}
}
function cleanQuery(req, res, next) {
queryOptions.current = {};
var err = null;
for (var key in req.query) {
if (!model.schema.paths.hasOwnProperty(key)) {
if (queryOptions.protected.indexOf(key) !== -1) {
queryOptions.current[key] = req.query[key];
}
var keys = key.match('.') &&
!model.schema.paths.hasOwnProperty(key) ?
key.split('.') : false;
if(!keys || !model.schema.paths.hasOwnProperty(keys[0]) ||
!model.schema.paths[keys[0]].schema.paths.hasOwnProperty(keys[1]) ||
keys.length > 2) {
if(!model.schema.virtuals.hasOwnProperty(keys[0])) {
err = createError(400);
break;
}
}
}
}
if(err) {
onError(res,next,err);
} else {
next();
}
}
usingExpress = !options.restify;
function createError(status) {
var err = new Error(http.STATUS_CODES[status]);
err.status = status;
return err;
}
function onError(res, next, err) {
if(usingExpress) {
//next(err);
res.status(err.status).send(err.message);
} else {
res.send(err.status, err.message);
}
}
options.middleware.push(cleanQuery);
var outputFn = options.outputFn ?
options.outputFn : (options.restify ?
outputRestify : outputExpress);
if(undefined === app.delete) {
app.delete = app.del;
}
var apiUri = '%s%s/%s';
var modelName;
if (options.name) {
modelName = options.name;
}
else {
modelName = model.modelName;
}
modelName = options.plural ? inflection.pluralize(modelName)
: modelName;
modelName = options.lowercase === true ? modelName.toLowerCase()
: modelName;
var uri_item = util.format(apiUri, options.prefix, options.version,
modelName);
if (uri_item.indexOf('/:id') === -1) {
uri_item += '/:id';
}
var uri_items = uri_item.replace('/:id', '');
var uri_count = uri_items + '/count';
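    // Query-string operators recognized by buildQuery below (illustrative
    // examples inferred from the parsing code, not from separate docs):
    //   ?name=~^jo    -> case-insensitive regex match
    //   ?age=>21      -> greater than,  ?age=>=21 -> greater than or equal
    //   ?age=<65      -> less than,     ?age=<=65 -> less than or equal
    //   ?rank=!=none  -> not equal
    //   ?tag=[a,b,c]  -> value contained in list ($in)
    //   anything else -> exact equality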
function buildQuery(query, req) {
var options = req.query,
excludedarr = filter.getExcluded(req.access);
var arr, i, re;
for (var key in options) {
if (excludedarr.indexOf(key) !== -1) {
// caller tries to query for excluded keys. for security
// reasons, we will skip the first -1 objects (to provoke
// an error) and immediately return;
return query.skip(-1);
}
query.where(key);
var value = options[key];
if ('~' === value[0]) {
re = new RegExp(value.substring(1), 'i');
query.where(key).regex(re);
} else if ('>' === value[0]) {
if ('=' === value[1]) {
query.gte(value.substr(2));
} else {
query.gt(value.substr(1));
}
} else if ('<' === value[0]) {
if ('=' === value[1]) {
query.lte(value.substr(2));
} else {
query.lt(value.substr(1));
}
} else if ('!' === value[0] && '=' === value[1]) { //H+ for !=
query.ne(value.substr(2));
} else if ('[' === value[0] && ']' === value[value.length - 1]) {
query.in(value.substr(1, value.length - 2).split(','));
} else {
query.equals(value);
}
}
//H+ exposes Query AND, OR and WHERE methods
if (queryOptions.current.query) {
query.where(JSON.parse(queryOptions.current.query,
jsonQueryParser));
}
        //TODO - the introduction of the QUERY param makes $and and $or obsolete
if (queryOptions.current.$and) {
query.and(JSON.parse(queryOptions.current.$and, jsonQueryParser));
}
if (queryOptions.current.$or) {
query.or(JSON.parse(queryOptions.current.$or, jsonQueryParser));
}
//H+ exposes Query AND, OR methods
if (queryOptions.current.skip) {
query.skip(queryOptions.current.skip);
}
if (queryOptions.current.limit) {
query.limit(queryOptions.current.limit);
}
if (queryOptions.current.sort) {
query.sort(queryOptions.current.sort);
}
var selectObj = {root: {}};
if (queryOptions.current.select) {
if (queryOptions.current.select) {
arr = queryOptions.current.select.split(',');
for (i = 0; i < arr.length; ++i) {
if (arr[i].match(/\./)) {
var subSelect = arr[i].split('.');
if (!selectObj[subSelect[0]]) {
selectObj[subSelect[0]] = {};
//selectObj.root[subSelect[0]] = 1;
}
selectObj[subSelect[0]][subSelect[1]] = 1;
} else {
selectObj.root[arr[i]] = 1;
}
}
}
query = query.select(selectObj.root);
}
if (queryOptions.current.populate) {
arr = queryOptions.current.populate.split(',');
for (i = 0; i < arr.length; ++i) {
if (!_.isUndefined(selectObj[arr[i]]) &&
!_.isEmpty(selectObj.root)) {
selectObj.root[arr[i]] = 1;
}
query = query.populate(arr[i], selectObj[arr[i]]);
}
query.select(selectObj.root);
}
return query;
}
//H+ - JSON query param parser
//TODO - improve to serve recursive logical operators
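    // Example (hypothetical values): ?query={"age":">=21","name":"~^jo"}
    // is parsed to { age: { $gte: '21' }, name: /^jo/i }; array values map
    // to { $in: [...] }, but only for keys that are real schema paths.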
function jsonQueryParser(key, value) {
if (_.isString(value)) {
if ('~' === value[0]) { //parse RegExp
return new RegExp(value.substring(1), 'i');
} else if ('>' === value[0]) {
if ('=' === value[1]) {
return {$gte: value.substr(2)};
} else {
return {$gt: value.substr(1)};
}
} else if ('<' === value[0]) {
if ('=' === value[1]) {
return {$lte: value.substr(2)};
} else {
return {$lt: value.substr(1)};
}
} else if ('!' === value[0] && '=' === value[1]) {
return {$ne: value.substr(2)};
}
} else if (_.isArray(value)) {
if (model.schema.paths.hasOwnProperty(key)) {
return {$in: value};
}
}
return value;
}
//H+ - JSON query param parser
function ensureContentType(req, res, next) {
var ct = req.headers['content-type'];
if (-1 === ct.indexOf('application/json')) {
onError(res,next,createError(400));
} else {
next();
}
}
function createSingleObject(body) {
for (var key in body) {
var path = model.schema.path(key);
if (typeof path === 'undefined') {
continue;
}
if (path.caster !== undefined) {
if (path.caster.instance === 'ObjectID') {
if (_.isArray(body[key])) {
for (var k = 0; k < body[key].length; ++k) {
if (typeof body[key][k] === 'object') {
body[key][k] = body[key][k]._id;
}
}
} else if ((typeof body[key] === 'object') &&
(body[key] !== null)) {
body[key] = body[key]._id;
}
}
} else if ((path.instance === 'ObjectID') &&
(typeof body[key] === 'object') &&
(body[key] !== null)) {
body[key] = body[key]._id;
}
}
}
function createObject(req, res, next) {
if (!req.body) {
onError(res,next,createError(400));
return;
}
var filterOpts = { access: req.access };
req.body = filter.filterObject(req.body, filterOpts);
if (model.schema.options._id) {
delete req.body._id;
}
if (model.schema.options.versionKey) {
delete req.body[model.schema.options.versionKey];
}
var key, path;
if (_.isArray(req.body)) {
for (var i = 0; i < req.body.length; ++i) {
createSingleObject(req.body[i]);
}
} else {
createSingleObject(req.body);
}
model.create(req.body, function(err, item) {
if (err) {
res.setHeader('Content-Type', 'application/json');
var forwardErr = new Error(JSON.stringify(err));
forwardErr.status = 400;
onError(res,next,forwardErr);
} else {
var result = null;
if (_.isArray(req.body)) {
var items = Array.prototype.slice.call(arguments, 1);
result = filter.filterObject(items, filterOpts);
} else {
result = filter.filterObject(item, filterOpts);
}
postCreate(res, result, function(err) {
if (err) {
onError(res,next,createError(400));
}
else {
outputFn(res, result);
next();
}
});
}
});
}
function modifyObject(req, res, next) {
var byId = {};
byId[options.idProperty || '_id'] = req.params.id;
if (!req.body) {
onError(res,next,createError(400));
return;
}
var filterOpts = { access: req.access };
req.body = filter.filterObject(req.body, filterOpts);
delete req.body._id;
if (model.schema.options.versionKey) {
delete req.body[model.schema.options.versionKey];
}
for (var key in req.body) {
var path = model.schema.path(key);
if (typeof path === 'undefined') {
continue;
}
if (path.caster !== undefined) {
if (path.caster.instance === 'ObjectID') {
if (_.isArray(req.body[key])) {
for (var j = 0; j < req.body[key].length; ++j) {
if (typeof req.body[key][j] === 'object') {
req.body[key][j] = req.body[key][j]._id;
}
}
} else if ((typeof req.body[key] === 'object') &&
(req.body[key] !== null)) {
req.body[key] = req.body[key]._id;
}
}
} else if ((path.instance === 'ObjectID') &&
(typeof req.body[key] === 'object') &&
(req.body[key] !== null)) {
req.body[key] = req.body[key]._id;
}
}
if (options.findOneAndUpdate) {
contextFilter(model, req, function(filteredContext) {
filteredContext.findOneAndUpdate(byId, req.body, {},
function(err, item) {
if (err || !item) {
onError(res,next,createError(404));
} else {
item = filter.filterObject(item, filterOpts);
outputFn(res, item);
next();
}
});
});
} else {
contextFilter(model, req, function(filteredContext) {
filteredContext.findOne(byId, function(err, doc) {
if (err || !doc) {
onError(res,next,createError(404));
} else {
for (var key in req.body) {
doc[key] = req.body[key];
}
doc.save(function(err, item) {
if (err) {
onError(res,next,createError(404));
} else {
item = filter.filterObject(item, filterOpts);
outputFn(res, item);
next();
}
});
}
});
});
}
}
var write_middleware = options.middleware.slice(-1);
if (options.prereq) {
var allowMW = permissions.allow(usingExpress, options.prereq);
write_middleware = [allowMW].concat(write_middleware);
}
var delete_middleware = write_middleware.slice(0);
if (options.access) {
var accessMW = permissions.access(options.access);
options.middleware.push(accessMW);
write_middleware.push(accessMW);
}
write_middleware.push(ensureContentType);
if (options.contextFilter) {
contextFilter = options.contextFilter;
}
else {
contextFilter = function(model, req, done) {
done(model);
};
}
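    // A custom contextFilter can scope every query before it runs; for example
    // (hypothetical, assuming an authenticated req.user is available):
    //   contextFilter: function(model, req, done) {
    //     done(model.find({ owner: req.user._id }));
    //   }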
if (options.postCreate) {
postCreate = options.postCreate;
}
else {
postCreate = function(res, result, done) {
done();
};
}
if (options.postDelete) {
postDelete = options.postDelete;
}
else {
postDelete = function(res, result, done) {
done();
};
}
app.get(uri_items, options.middleware, function(req, res, next) {
contextFilter(model, req, function(filteredContext) {
buildQuery(filteredContext.find(), req).lean(lean)
.exec(function(err, items) {
if (err) {
onError(res,next,createError(400));
} else {
var populate = queryOptions.current.populate,
opts = {
populate: populate,
access: req.access
};
items = filter.filterObject(items, opts);
outputFn(res, items);
next();
}
});
});
}, postProcess);
app.get(uri_count, options.middleware, function(req, res, next) {
contextFilter(model, req, function(filteredContext) {
buildQuery(filteredContext.count(), req).exec(function(err, count) {
if (err) {
onError(res,next,createError(400));
} else {
outputFn(res, { count: count });
next();
}
});
});
}, postProcess);
app.post(uri_items, write_middleware, createObject, postProcess);
if (!options.strict) {
app.put(uri_items, write_middleware, createObject, postProcess);
}
if (!options.strict) {
// TODO: reconsider this function
app.delete(uri_items, delete_middleware, function(req, res, next) {
contextFilter(model, req, function(filteredContext) {
var results, error;
buildQuery(filteredContext.find(), req).remove(function(err) {
if (err) {
onError(res,next,createError(400));
} else {
if(usingExpress) {
res.status(200).end();
} else {
res.send(200);
}
next();
}
});
});
}, postProcess);
}
app.get(uri_item, options.middleware, function(req, res, next) {
var byId = {};
byId[options.idProperty || '_id'] = req.params.id;
contextFilter(model, req, function(filteredContext) {
buildQuery(filteredContext.findOne().and(byId), req).lean(lean)
.findOne(function(err, item) {
if (err || !item) {
onError(res,next,createError(404));
} else {
var populate = queryOptions.current.populate,
opts = {
populate: populate,
access: req.access
};
item = filter.filterObject(item, opts);
outputFn(res, item);
next();
}
});
});
}, postProcess);
if (!options.strict) {
// TODO: POST (create) doesn't make sense here
app.post(uri_item, write_middleware, modifyObject, postProcess);
}
app.put(uri_item, write_middleware, modifyObject, postProcess);
app.delete(uri_item, delete_middleware, function(req, res, next) {
var byId = {};
byId[options.idProperty || '_id'] = req.params.id;
if (options.findOneAndUpdate) {
contextFilter(model, req, function(filteredContext) {
var results, error;
filteredContext.find().and(byId).findOneAndRemove(
function(err, result) {
if (err || !result) {
onError(res,next,createError(404));
} else {
postDelete(res, [result], function(err) {
if (err) {
onError(res,next,createError(400));
}
else {
if(usingExpress) {
res.status(200).end();
} else {
res.send(200);
}
next();
}
});
}
});
});
} else {
contextFilter(model, req, function(filteredContext) {
filteredContext.findOne(byId, function(err, doc) {
if (err || !doc) {
onError(res,next,createError(404));
} else {
doc.remove(function(err, result) {
postDelete(res, [doc], function(err) {
if (err) {
onError(res,next,createError(400));
}
else {
if(usingExpress) {
res.status(200).end();
} else {
res.send(200);
}
next();
}
});
});
}
});
});
}
}, postProcess);
};
module.exports.defaults = function(options) {
customDefaults = options;
};
module.exports.serve = restify;
| lib/express-restify-mongoose.js | /**
* express-restify-mongoose.js
*
* Copyright (C) 2013 by Florian Holzapfel
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the 'Software'), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
**/
var util = require('util'),
Filter = require('./resource_filter'),
permissions = require('./permissions'),
_ = require('lodash'),
inflection = require('inflection'),
http = require('http'),
customDefaults = null;
function getDefaults() {
var options = {
prefix: '/api',
version: '/v1',
private: false,
lean: true,
plural: true,
middleware: [],
strict: false,
findOneAndUpdate: true,
findOneAndRemove: true,
contextFilter: null,
postCreate: null
};
for (var prop in customDefaults) {
options[prop] = customDefaults[prop];
}
return options;
}
function outputExpress(res, result) {
res.type('json');
res.status(200).send(JSON.stringify(result));
}
function outputRestify(res, result) {
res.send(result);
}
var restify = function(app, model, opts) {
var postProcess, lean, filter, contextFilter, postCreate, postDelete,
usingExpress,
options = getDefaults(),
queryOptions = {
protected: ['skip', 'limit', 'sort', 'populate', 'select', 'lean',
'$and', '$or', 'query'],//H+ exposes OR, AND and WHERE methods
current: {}
};
for (var prop in opts) {
if (opts[prop] instanceof Array) {
options[prop] = [];
for (var index in opts[prop]) {
options[prop][index] = opts[prop][index];
}
} else {
options[prop] = opts[prop];
}
}
options.private = options.private ? options.private.split(',') : [];
options.protected = options.protected ?
options.protected.split(',') : [];
model.schema.eachPath(function(name, path) {
if (path.options.access) {
switch (path.options.access.toLowerCase()) {
case 'private':
options.private.push(name);
break;
case 'protected':
options.protected.push(name);
break;
}
}
});
if (options.exclude) {
options.private = options.exclude.split(',');
console.error('Exclude is deprecated. Use private instead');
}
lean = options.lean;
filter = new Filter(model, options.private, options.protected);
postProcess = options.postProcess || function() {
};
if (options.middleware) {
if (!(options.middleware instanceof Array)) {
var m = options.middleware;
options.middleware = [ m ];
}
}
function cleanQuery(req, res, next) {
queryOptions.current = {};
var err = null;
for (var key in req.query) {
if (!model.schema.paths.hasOwnProperty(key)) {
if (queryOptions.protected.indexOf(key) !== -1) {
queryOptions.current[key] = req.query[key];
}
var keys = key.match('.') &&
!model.schema.paths.hasOwnProperty(key) ?
key.split('.') : false;
if(!keys || !model.schema.paths.hasOwnProperty(keys[0]) ||
!model.schema.paths[keys[0]].schema.paths.hasOwnProperty(keys[1]) ||
keys.length > 2) {
if(!model.schema.virtuals.hasOwnProperty(keys[0])) {
err = createError(400);
break;
}
}
}
}
if(err) {
onError(res,next,err);
} else {
next();
}
}
usingExpress = !options.restify;
function createError(status) {
var err = new Error(http.STATUS_CODES[status]);
err.status = status;
return err;
}
function onError(res, next, err) {
if(usingExpress) {
//next(err);
res.status(err.status).send(err.message);
} else {
res.send(err.status, err.message);
}
}
options.middleware.push(cleanQuery);
var outputFn = options.outputFn ?
options.outputFn : (options.restify ?
outputRestify : outputExpress);
if(undefined === app.delete) {
app.delete = app.del;
}
var apiUri = '%s%s/%s';
var modelName;
if (options.name) {
modelName = options.name;
}
else {
modelName = model.modelName;
}
modelName = options.plural ? inflection.pluralize(modelName)
: modelName;
modelName = options.lowercase === true ? modelName.toLowerCase()
: modelName;
var uri_item = util.format(apiUri, options.prefix, options.version,
modelName);
if (uri_item.indexOf('/:id') === -1) {
uri_item += '/:id';
}
var uri_items = uri_item.replace('/:id', '');
var uri_count = uri_items + '/count';
function buildQuery(query, req) {
var options = req.query,
excludedarr = filter.getExcluded(req.access);
var arr, i, re;
for (var key in options) {
if (excludedarr.indexOf(key) !== -1) {
// caller tries to query for excluded keys. for security
// reasons, we will skip the first -1 objects (to provoke
// an error) and immediately return;
return query.skip(-1);
}
console.log('QUERY ' + key + options[key]);
query.where(key);
var value = options[key];
if ('~' === value[0]) {
re = new RegExp(value.substring(1), 'i');
query.where(key).regex(re);
} else if ('>' === value[0]) {
if ('=' === value[1]) {
query.gte(value.substr(2));
} else {
query.gt(value.substr(1));
}
} else if ('<' === value[0]) {
if ('=' === value[1]) {
query.lte(value.substr(2));
} else {
query.lt(value.substr(1));
}
} else if ('!' === value[0] && '=' === value[1]) { //H+ for !=
query.ne(value.substr(2));
} else if ('[' === value[0] && ']' === value[value.length - 1]) {
query.in(value.substr(1, value.length - 2).split(','));
} else {
query.equals(value);
}
}
//H+ exposes Query AND, OR and WHERE methods
if (queryOptions.current.query) {
query.where(JSON.parse(queryOptions.current.query,
jsonQueryParser));
}
        //TODO - the introduction of the QUERY param makes $and and $or obsolete
if (queryOptions.current.$and) {
query.and(JSON.parse(queryOptions.current.$and, jsonQueryParser));
}
if (queryOptions.current.$or) {
query.or(JSON.parse(queryOptions.current.$or, jsonQueryParser));
}
//H+ exposes Query AND, OR methods
if (queryOptions.current.skip) {
query.skip(queryOptions.current.skip);
}
if (queryOptions.current.limit) {
query.limit(queryOptions.current.limit);
}
if (queryOptions.current.sort) {
query.sort(queryOptions.current.sort);
}
var selectObj = {root: {}};
if (queryOptions.current.select) {
if (queryOptions.current.select) {
arr = queryOptions.current.select.split(',');
for (i = 0; i < arr.length; ++i) {
if (arr[i].match(/\./)) {
var subSelect = arr[i].split('.');
if (!selectObj[subSelect[0]]) {
selectObj[subSelect[0]] = {};
//selectObj.root[subSelect[0]] = 1;
}
selectObj[subSelect[0]][subSelect[1]] = 1;
} else {
selectObj.root[arr[i]] = 1;
}
}
}
query = query.select(selectObj.root);
}
if (queryOptions.current.populate) {
arr = queryOptions.current.populate.split(',');
for (i = 0; i < arr.length; ++i) {
if (!_.isUndefined(selectObj[arr[i]]) &&
!_.isEmpty(selectObj.root)) {
selectObj.root[arr[i]] = 1;
}
query = query.populate(arr[i], selectObj[arr[i]]);
}
query.select(selectObj.root);
}
return query;
}
//H+ - JSON query param parser
//TODO - improve to serve recursive logical operators
function jsonQueryParser(key, value) {
if (_.isString(value)) {
if ('~' === value[0]) { //parse RegExp
return new RegExp(value.substring(1), 'i');
} else if ('>' === value[0]) {
if ('=' === value[1]) {
return {$gte: value.substr(2)};
} else {
return {$gt: value.substr(1)};
}
} else if ('<' === value[0]) {
if ('=' === value[1]) {
return {$lte: value.substr(2)};
} else {
return {$lt: value.substr(1)};
}
} else if ('!' === value[0] && '=' === value[1]) {
return {$ne: value.substr(2)};
}
} else if (_.isArray(value)) {
if (model.schema.paths.hasOwnProperty(key)) {
return {$in: value};
}
}
return value;
}
//H+ - JSON query param parser
function ensureContentType(req, res, next) {
var ct = req.headers['content-type'];
if (-1 === ct.indexOf('application/json')) {
onError(res,next,createError(400));
} else {
next();
}
}
function createSingleObject(body) {
for (var key in body) {
var path = model.schema.path(key);
if (typeof path === 'undefined') {
continue;
}
if (path.caster !== undefined) {
if (path.caster.instance === 'ObjectID') {
if (_.isArray(body[key])) {
for (var k = 0; k < body[key].length; ++k) {
if (typeof body[key][k] === 'object') {
body[key][k] = body[key][k]._id;
}
}
} else if ((typeof body[key] === 'object') &&
(body[key] !== null)) {
body[key] = body[key]._id;
}
}
} else if ((path.instance === 'ObjectID') &&
(typeof body[key] === 'object') &&
(body[key] !== null)) {
body[key] = body[key]._id;
}
}
}
function createObject(req, res, next) {
if (!req.body) {
onError(res,next,createError(400));
return;
}
var filterOpts = { access: req.access };
req.body = filter.filterObject(req.body, filterOpts);
if (model.schema.options._id) {
delete req.body._id;
}
if (model.schema.options.versionKey) {
delete req.body[model.schema.options.versionKey];
}
var key, path;
if (_.isArray(req.body)) {
for (var i = 0; i < req.body.length; ++i) {
createSingleObject(req.body[i]);
}
} else {
createSingleObject(req.body);
}
model.create(req.body, function(err, item) {
if (err) {
res.setHeader('Content-Type', 'application/json');
var forwardErr = new Error(JSON.stringify(err));
forwardErr.status = 400;
onError(res,next,forwardErr);
} else {
var result = null;
if (_.isArray(req.body)) {
var items = Array.prototype.slice.call(arguments, 1);
result = filter.filterObject(items, filterOpts);
} else {
result = filter.filterObject(item, filterOpts);
}
postCreate(res, result, function(err) {
if (err) {
onError(res,next,createError(400));
}
else {
outputFn(res, result);
next();
}
});
}
});
}
function modifyObject(req, res, next) {
var byId = {};
byId[options.idProperty || '_id'] = req.params.id;
if (!req.body) {
onError(res,next,createError(400));
return;
}
var filterOpts = { access: req.access };
req.body = filter.filterObject(req.body, filterOpts);
delete req.body._id;
if (model.schema.options.versionKey) {
delete req.body[model.schema.options.versionKey];
}
for (var key in req.body) {
var path = model.schema.path(key);
if (typeof path === 'undefined') {
continue;
}
if (path.caster !== undefined) {
if (path.caster.instance === 'ObjectID') {
if (_.isArray(req.body[key])) {
for (var j = 0; j < req.body[key].length; ++j) {
if (typeof req.body[key][j] === 'object') {
req.body[key][j] = req.body[key][j]._id;
}
}
} else if ((typeof req.body[key] === 'object') &&
(req.body[key] !== null)) {
req.body[key] = req.body[key]._id;
}
}
} else if ((path.instance === 'ObjectID') &&
(typeof req.body[key] === 'object') &&
(req.body[key] !== null)) {
req.body[key] = req.body[key]._id;
}
}
if (options.findOneAndUpdate) {
contextFilter(model, req, function(filteredContext) {
filteredContext.findOneAndUpdate(byId, req.body, {},
function(err, item) {
if (err || !item) {
onError(res,next,createError(404));
} else {
item = filter.filterObject(item, filterOpts);
outputFn(res, item);
next();
}
});
});
} else {
contextFilter(model, req, function(filteredContext) {
filteredContext.findOne(byId, function(err, doc) {
if (err || !doc) {
onError(res,next,createError(404));
} else {
for (var key in req.body) {
doc[key] = req.body[key];
}
doc.save(function(err, item) {
if (err) {
onError(res,next,createError(404));
} else {
item = filter.filterObject(item, filterOpts);
outputFn(res, item);
next();
}
});
}
});
});
}
}
var write_middleware = options.middleware.slice(-1);
if (options.prereq) {
var allowMW = permissions.allow(usingExpress, options.prereq);
write_middleware = [allowMW].concat(write_middleware);
}
var delete_middleware = write_middleware.slice(0);
if (options.access) {
var accessMW = permissions.access(options.access);
options.middleware.push(accessMW);
write_middleware.push(accessMW);
}
write_middleware.push(ensureContentType);
if (options.contextFilter) {
contextFilter = options.contextFilter;
}
else {
contextFilter = function(model, req, done) {
done(model);
};
}
if (options.postCreate) {
postCreate = options.postCreate;
}
else {
postCreate = function(res, result, done) {
done();
};
}
if (options.postDelete) {
postDelete = options.postDelete;
}
else {
postDelete = function(res, result, done) {
done();
};
}
app.get(uri_items, options.middleware, function(req, res, next) {
contextFilter(model, req, function(filteredContext) {
buildQuery(filteredContext.find(), req).lean(lean)
.exec(function(err, items) {
if (err) {
onError(res,next,createError(400));
} else {
var populate = queryOptions.current.populate,
opts = {
populate: populate,
access: req.access
};
items = filter.filterObject(items, opts);
outputFn(res, items);
next();
}
});
});
}, postProcess);
app.get(uri_count, options.middleware, function(req, res, next) {
contextFilter(model, req, function(filteredContext) {
buildQuery(filteredContext.count(), req).exec(function(err, count) {
if (err) {
onError(res,next,createError(400));
} else {
outputFn(res, { count: count });
next();
}
});
});
}, postProcess);
app.post(uri_items, write_middleware, createObject, postProcess);
if (!options.strict) {
app.put(uri_items, write_middleware, createObject, postProcess);
}
if (!options.strict) {
// TODO: reconsider this function
app.delete(uri_items, delete_middleware, function(req, res, next) {
contextFilter(model, req, function(filteredContext) {
var results, error;
buildQuery(filteredContext.find(), req).remove(function(err) {
if (err) {
onError(res,next,createError(400));
} else {
if(usingExpress) {
res.status(200).end();
} else {
res.send(200);
}
next();
}
});
});
}, postProcess);
}
app.get(uri_item, options.middleware, function(req, res, next) {
var byId = {};
byId[options.idProperty || '_id'] = req.params.id;
contextFilter(model, req, function(filteredContext) {
buildQuery(filteredContext.findOne().and(byId), req).lean(lean)
.findOne(function(err, item) {
if (err || !item) {
onError(res,next,createError(404));
} else {
var populate = queryOptions.current.populate,
opts = {
populate: populate,
access: req.access
};
item = filter.filterObject(item, opts);
outputFn(res, item);
next();
}
});
});
}, postProcess);
if (!options.strict) {
// TODO: POST (create) doesn't make sense here
app.post(uri_item, write_middleware, modifyObject, postProcess);
}
app.put(uri_item, write_middleware, modifyObject, postProcess);
app.delete(uri_item, delete_middleware, function(req, res, next) {
var byId = {};
byId[options.idProperty || '_id'] = req.params.id;
if (options.findOneAndUpdate) {
contextFilter(model, req, function(filteredContext) {
var results, error;
filteredContext.find().and(byId).findOneAndRemove(
function(err, result) {
if (err || !result) {
onError(res,next,createError(404));
} else {
postDelete(res, [result], function(err) {
if (err) {
onError(res,next,createError(400));
}
else {
if(usingExpress) {
res.status(200).end();
} else {
res.send(200);
}
next();
}
});
}
});
});
} else {
contextFilter(model, req, function(filteredContext) {
filteredContext.findOne(byId, function(err, doc) {
if (err || !doc) {
onError(res,next,createError(404));
} else {
doc.remove(function(err, result) {
postDelete(res, [doc], function(err) {
if (err) {
onError(res,next,createError(400));
}
else {
if(usingExpress) {
res.status(200).end();
} else {
res.send(200);
}
next();
}
});
});
}
});
});
}
}, postProcess);
};
module.exports.defaults = function(options) {
customDefaults = options;
};
module.exports.serve = restify;
| remove debug output
| lib/express-restify-mongoose.js | remove debug output | <ide><path>ib/express-restify-mongoose.js
<ide> // an error) and immediately return;
<ide> return query.skip(-1);
<ide> }
<del>
<del> console.log('QUERY ' + key + options[key]);
<ide>
<ide> query.where(key);
<ide> var value = options[key]; |
|
Java | apache-2.0 | error: pathspec 'src/trickyexamples/Fibonacci.java' did not match any file(s) known to git
| 706d1174b6ff046783e70679cc2753c85504b196 | 1 | antalpeti/Test | package trickyexamples;
import java.util.Scanner;
public class Fibonacci {
public static void main(String args[]) {
System.out.println("Enter number upto which Fibonacci series to print: ");
Scanner sc = new Scanner(System.in);
int number = sc.nextInt();
System.out.println("Fibonacci series upto " + number + " numbers : ");
for (int i = 1; i <= number; i++) {
System.out.print(fibonacci2(i) + " ");
}
System.out.println();
sc.close();
System.out.println("\nSum of fibonacci numbers under 1000:");
sumFibonacci();
}
/**
   * Java program for Fibonacci number using recursion. Note that this implementation
   * uses plain binary recursion, not tail recursion: each call branches into two further calls.
*
* @return Fibonacci number
*/
public static int fibonacci(int number) {
if (number == 1 || number == 2) {
return 1;
}
    return fibonacci(number - 1) + fibonacci(number - 2); // binary recursion (not a tail call)
}
/**
   * Java program to calculate Fibonacci number using a loop (iteration).
*
* @return Fibonacci number
*/
public static int fibonacci2(int number) {
if (number == 1 || number == 2) {
return 1;
}
int fibo1 = 1, fibo2 = 1, fibonacci = 1;
for (int i = 3; i <= number; i++) {
fibonacci = fibo1 + fibo2;
fibo1 = fibo2;
fibo2 = fibonacci;
}
return fibonacci; // Fibonacci number
}
public static void sumFibonacci() {
int fibo1 = 1, fibo2 = 1, fibonacci = 0, sum = 0;
while (fibonacci < 1000) {
fibonacci = fibo1 + fibo2;
fibo1 = fibo2;
fibo2 = fibonacci;
sum += fibonacci;
}
System.out.println(sum);
}
}
| src/trickyexamples/Fibonacci.java | Added examples about Fibonacci sequences. | src/trickyexamples/Fibonacci.java | Added examples about Fibonacci sequences. | <ide><path>rc/trickyexamples/Fibonacci.java
<add>package trickyexamples;
<add>
<add>import java.util.Scanner;
<add>
<add>public class Fibonacci {
<add>
<add> public static void main(String args[]) {
<add>
<add>    System.out.println("Enter number up to which to print the Fibonacci series: ");
<add> Scanner sc = new Scanner(System.in);
<add> int number = sc.nextInt();
<add>
<add>    System.out.println("Fibonacci series up to " + number + " numbers: ");
<add>
<add> for (int i = 1; i <= number; i++) {
<add> System.out.print(fibonacci2(i) + " ");
<add> }
<add> System.out.println();
<add> sc.close();
<add>
<add> System.out.println("\nSum of fibonacci numbers under 1000:");
<add> sumFibonacci();
<add> }
<add>
<add>
<add> /**
<add>   * Java program for Fibonacci number using recursion. Note that this implementation
<add>   * uses plain binary recursion, not tail recursion: each call branches into two further calls.
<add> *
<add> * @return Fibonacci number
<add> */
<add> public static int fibonacci(int number) {
<add> if (number == 1 || number == 2) {
<add> return 1;
<add> }
<add>
<add>    return fibonacci(number - 1) + fibonacci(number - 2); // binary recursion (not a tail call)
<add> }
<add>
<add> /**
<add>   * Java program to calculate Fibonacci number using a loop (iteration).
<add> *
<add> * @return Fibonacci number
<add> */
<add> public static int fibonacci2(int number) {
<add> if (number == 1 || number == 2) {
<add> return 1;
<add> }
<add> int fibo1 = 1, fibo2 = 1, fibonacci = 1;
<add> for (int i = 3; i <= number; i++) {
<add>
<add> fibonacci = fibo1 + fibo2;
<add>
<add> fibo1 = fibo2;
<add> fibo2 = fibonacci;
<add>
<add> }
<add> return fibonacci; // Fibonacci number
<add> }
<add>
<add> public static void sumFibonacci() {
<add> int fibo1 = 1, fibo2 = 1, fibonacci = 0, sum = 0;
<add> while (fibonacci < 1000) {
<add> fibonacci = fibo1 + fibo2;
<add> fibo1 = fibo2;
<add> fibo2 = fibonacci;
<add> sum += fibonacci;
<add> }
<add> System.out.println(sum);
<add> }
<add>} |
|
JavaScript | mit | e04b4e4e0bdd9f439f825f51116cd9e37c6035b6 | 0 | eXon/videojs-youtube | /* The MIT License (MIT)
Copyright (c) 2014-2015 Benoit Tremblay <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE. */
(function() {
'use strict';
var Tech = videojs.getComponent('Tech');
var Youtube = videojs.extends(Tech, {
constructor: function(options, ready) {
Tech.call(this, options, ready);
this.setSrc(this.options_.source, true);
},
createEl: function() {
var div = document.createElement('div');
div.setAttribute('id', this.options_.techId);
div.setAttribute('style', 'width:100%;height:100%');
var divWrapper = document.createElement('div');
divWrapper.setAttribute('style', 'width:100%;height:100%;position:relative');
divWrapper.appendChild(div);
if (!_isOnMobile && !this.options_.ytControls) {
var divBlocker = document.createElement('div');
divBlocker.setAttribute('class', 'vjs-iframe-blocker');
divBlocker.setAttribute('style', 'position:absolute;top:0;left:0;width:100%;height:100%');
// In case the blocker is still there and we want to pause
divBlocker.onclick = function() {
this.pause();
}.bind(this);
divWrapper.appendChild(divBlocker);
}
if (Youtube.isApiReady) {
this.initYTPlayer();
} else {
Youtube.apiReadyQueue.push(this);
}
return divWrapper;
},
initYTPlayer: function() {
var playerVars = {
controls: 0,
modestbranding: 1,
rel: 0,
showinfo: 0,
loop: this.options_.loop ? 1 : 0
};
// Let the user set any YouTube parameter
// https://developers.google.com/youtube/player_parameters?playerVersion=HTML5#Parameters
// To use YouTube controls, you must use ytControls instead
// To use the loop or autoplay, use the video.js settings
if (typeof this.options_.autohide !== 'undefined') {
playerVars.autohide = this.options_.autohide;
}
if (typeof this.options_['cc_load_policy'] !== 'undefined') {
playerVars['cc_load_policy'] = this.options_['cc_load_policy'];
}
if (typeof this.options_.ytControls !== 'undefined') {
playerVars.controls = this.options_.ytControls;
}
if (typeof this.options_.disablekb !== 'undefined') {
playerVars.disablekb = this.options_.disablekb;
}
if (typeof this.options_.end !== 'undefined') {
playerVars.end = this.options_.end;
}
if (typeof this.options_.color !== 'undefined') {
playerVars.color = this.options_.color;
}
if (typeof this.options_.fs !== 'undefined') {
playerVars.fs = this.options_.fs;
}
if (typeof this.options_.hl !== 'undefined') {
playerVars.hl = this.options_.hl;
} else if (typeof this.options_.language !== 'undefined') {
        // Set the YouTube player to the same language as video.js
playerVars.hl = this.options_.language.substr(0, 2);
}
if (typeof this.options_['iv_load_policy'] !== 'undefined') {
playerVars['iv_load_policy'] = this.options_['iv_load_policy'];
}
if (typeof this.options_.list !== 'undefined') {
playerVars.list = this.options_.list;
} else if (typeof this.url.listId !== 'undefined') {
playerVars.list = this.url.listId;
}
if (typeof this.options_.listType !== 'undefined') {
playerVars.listType = this.options_.listType;
}
if (typeof this.options_.modestbranding !== 'undefined') {
playerVars.modestbranding = this.options_.modestbranding;
}
if (typeof this.options_.playlist !== 'undefined') {
playerVars.playlist = this.options_.playlist;
}
if (typeof this.options_.playsinline !== 'undefined') {
playerVars.playsinline = this.options_.playsinline;
}
if (typeof this.options_.rel !== 'undefined') {
playerVars.rel = this.options_.rel;
}
if (typeof this.options_.showinfo !== 'undefined') {
playerVars.showinfo = this.options_.showinfo;
}
if (typeof this.options_.start !== 'undefined') {
playerVars.start = this.options_.start;
}
if (typeof this.options_.theme !== 'undefined') {
playerVars.theme = this.options_.theme;
}
this.activeVideoId = this.url.videoId;
this.activeList = playerVars.list;
this.ytPlayer = new YT.Player(this.options_.techId, {
videoId: this.url.videoId,
playerVars: playerVars,
events: {
onReady: this.onPlayerReady.bind(this),
onPlaybackQualityChange: this.onPlayerPlaybackQualityChange.bind(this),
onStateChange: this.onPlayerStateChange.bind(this),
onError: this.onPlayerError.bind(this)
}
});
},
onPlayerReady: function() {
this.triggerReady();
if (this.playOnReady) {
this.play();
}
},
onPlayerPlaybackQualityChange: function() {
},
onPlayerStateChange: function(e) {
var state = e.data;
if (state === this.lastState) {
return;
}
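      // Translate YouTube player state changes into the corresponding video.js events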
switch (state) {
case -1:
this.trigger('durationchange');
break;
case YT.PlayerState.ENDED:
this.trigger('ended');
break;
case YT.PlayerState.PLAYING:
this.trigger('timeupdate');
this.trigger('durationchange');
this.trigger('playing');
this.trigger('play');
if (this.isSeeking) {
this.trigger('seeked');
this.isSeeking = false;
}
break;
case YT.PlayerState.PAUSED:
if (this.isSeeking) {
this.trigger('seeked');
this.isSeeking = false;
this.ytPlayer.playVideo();
} else {
this.trigger('pause');
}
break;
case YT.PlayerState.BUFFERING:
this.player_.trigger('timeupdate');
this.player_.trigger('waiting');
break;
}
this.lastState = state;
},
onPlayerError: function(e) {
this.errorNumber = e.data;
this.trigger('error');
this.ytPlayer.stopVideo();
this.ytPlayer.destroy();
this.ytPlayer = null;
},
error: function() {
switch (this.errorNumber) {
case 2:
return { code: 'Unable to find the video' };
case 5:
return { code: 'Error while trying to play the video' };
case 100:
return { code: 'Unable to find the video' };
case 101:
case 150:
return { code: 'Playback on other Websites has been disabled by the video owner.' };
}
return { code: 'YouTube unknown error (' + this.errorNumber + ')' };
},
src: function() {
return this.source;
},
poster: function() {
return this.poster;
},
setPoster: function(poster) {
this.poster = poster;
},
setSrc: function(source) {
if (!source || !source.src) {
return;
}
this.source = source;
this.url = Youtube.parseUrl(source.src);
if (!this.options_.poster) {
Youtube.loadThumbnailUrl(this.url.videoId, function(poster) {
this.setPoster(poster);
this.trigger('posterchange');
}.bind(this));
}
if (this.options_.autoplay && !_isOnMobile) {
if (this.isReady_) {
this.play();
} else {
this.playOnReady = true;
}
}
},
play: function() {
if (!this.url || !this.url.videoId) {
return;
}
if (this.isReady_) {
if (this.url.listId) {
if (this.activeList === this.url.listId) {
this.ytPlayer.playVideo();
} else {
this.ytPlayer.loadPlaylist(this.url.listId);
this.activeList = this.url.listId;
}
        }
        if (this.activeVideoId === this.url.videoId) {
this.ytPlayer.playVideo();
} else {
this.ytPlayer.loadVideoById(this.url.videoId);
this.activeVideoId = this.url.videoId;
}
} else {
this.trigger('waiting');
this.playOnReady = true;
}
},
pause: function() {
if (this.ytPlayer) {
this.ytPlayer.pauseVideo();
}
},
paused: function() {
      return (this.ytPlayer) ?
(this.lastState !== YT.PlayerState.PLAYING && this.lastState !== YT.PlayerState.BUFFERING)
: true;
},
currentTime: function() {
return this.ytPlayer ? this.ytPlayer.getCurrentTime() : 0;
},
setCurrentTime: function(seconds) {
if (this.lastState === YT.PlayerState.PAUSED) {
this.timeBeforeSeek = this.currentTime();
}
this.timeBeforeSeek = this.currentTime();
this.ytPlayer.seekTo(seconds, true);
this.trigger('timeupdate');
this.trigger('seeking');
this.isSeeking = true;
// A seek event during pause does not return an event to trigger a seeked event,
// so run an interval timer to look for the currentTime to change
if (this.lastState === YT.PlayerState.PAUSED && this.timeBeforeSeek !== seconds) {
this.checkSeekedInPauseInterval = setInterval(function() {
if (this.lastState !== YT.PlayerState.PAUSED || !this.isSeeking) {
// If something changed while we were waiting for the currentTime to change,
// clear the interval timer
clearInterval(this.checkSeekedInPauseInterval);
} else if (this.currentTime() !== this.timeBeforeSeek) {
this.trigger('timeupdate');
this.trigger('seeked');
this.isSeeking = false;
clearInterval(this.checkSeekedInPauseInterval);
}
this.play();
}.bind(this), 250);
}
},
playbackRate: function() {
return this.ytPlayer ? this.ytPlayer.getPlaybackRate() : 1;
},
setPlaybackRate: function(suggestedRate) {
if (!this.ytPlayer) {
return;
}
this.ytPlayer.setPlaybackRate(suggestedRate);
this.trigger('ratechange');
},
duration: function() {
return this.ytPlayer ? this.ytPlayer.getDuration() : 0;
},
currentSrc: function() {
return this.source;
},
ended: function() {
return this.ytPlayer ? (this.lastState === YT.PlayerState.ENDED) : false;
},
volume: function() {
return this.ytPlayer ? this.ytPlayer.getVolume() / 100.0 : 1;
},
setVolume: function(percentAsDecimal) {
if (!this.ytPlayer) {
return;
}
this.ytPlayer.setVolume(percentAsDecimal * 100.0);
},
muted: function() {
return this.ytPlayer ? this.ytPlayer.isMuted() : false;
},
setMuted: function(mute) {
if (!this.ytPlayer) {
return;
}
if (mute) {
this.ytPlayer.mute();
} else {
this.ytPlayer.unMute();
}
},
buffered: function() {
if(!this.ytPlayer || !this.ytPlayer.getVideoLoadedFraction) {
return {
length: 0,
start: function() {
throw new Error('This TimeRanges object is empty');
},
end: function() {
throw new Error('This TimeRanges object is empty');
}
};
}
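      // The iframe API only exposes a single loaded fraction, so report it as one buffered range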
var end = this.ytPlayer.getVideoLoadedFraction() * this.ytPlayer.getDuration();
return {
length: 1,
start: function() { return 0; },
end: function() { return end; }
};
},
supportsFullScreen: function() {
if (typeof this.el_.webkitEnterFullScreen === 'function') {
// Seems to be broken in Chromium/Chrome && Safari in Leopard
if (/Android/.test(videojs.USER_AGENT) || !/Chrome|Mac OS X 10.5/.test(videojs.USER_AGENT)) {
return true;
}
}
return false;
}
});
Youtube.isSupported = function() {
return true;
};
Youtube.canPlaySource = function(e) {
return (e.type === 'video/youtube');
};
var _isOnMobile = /(iPad|iPhone|iPod|Android)/g.test(navigator.userAgent);
Youtube.parseUrl = function(url) {
var result = {
videoId: null
};
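    // Capture the 11-character video id from the common YouTube URL formats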
var regex = /^.*(youtu.be\/|v\/|u\/\w\/|embed\/|watch\?v=|\&v=)([^#\&\?]*).*/;
var match = url.match(regex);
if (match && match[2].length === 11) {
result.videoId = match[2];
}
var regPlaylist = /[?&]list=([^#\&\?]+)/;
match = url.match(regPlaylist);
if(match && match[1]) {
result.listId = match[1];
}
return result;
};
// Tries to get the highest resolution thumbnail available for the video
Youtube.loadThumbnailUrl = function(id, callback){
var uri = 'https://img.youtube.com/vi/' + id + '/maxresdefault.jpg';
var fallback = 'https://img.youtube.com/vi/' + id + '/0.jpg';
try {
var image = new Image();
image.onload = function(){
// Onload may still be called if YouTube returns the 120x90 error thumbnail
if('naturalHeight' in this){
if(this.naturalHeight <= 90 || this.naturalWidth <= 120) {
this.onerror();
return;
}
} else if(this.height <= 90 || this.width <= 120) {
this.onerror();
return;
}
callback(uri);
};
image.onerror = function(){
callback(fallback);
};
image.src = uri;
}
catch(e){ callback(fallback); }
};
function loadApi() {
var tag = document.createElement('script');
tag.src = 'https://www.youtube.com/iframe_api';
var firstScriptTag = document.getElementsByTagName('script')[0];
firstScriptTag.parentNode.insertBefore(tag, firstScriptTag);
}
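  // CSS for the click blocker: hidden by default, displayed while the player is user-inactive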
function injectCss() {
var css = '.vjs-iframe-blocker { display: none; }' +
'.vjs-user-inactive .vjs-iframe-blocker { display: block; }';
var head = document.head || document.getElementsByTagName('head')[0];
var style = document.createElement('style');
style.type = 'text/css';
if (style.styleSheet){
style.styleSheet.cssText = css;
} else {
style.appendChild(document.createTextNode(css));
}
head.appendChild(style);
}
Youtube.apiReadyQueue = [];
window.onYouTubeIframeAPIReady = function() {
Youtube.isApiReady = true;
for (var i = 0; i < Youtube.apiReadyQueue.length; ++i) {
Youtube.apiReadyQueue[i].initYTPlayer();
}
};
loadApi();
injectCss();
videojs.registerComponent('Youtube', Youtube);
})();
| src/Youtube.js | /* The MIT License (MIT)
Copyright (c) 2014-2015 Benoit Tremblay <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE. */
(function() {
'use strict';
var Tech = videojs.getComponent('Tech');
var Youtube = videojs.extends(Tech, {
constructor: function(options, ready) {
Tech.call(this, options, ready);
this.setSrc(this.options_.source, true);
},
createEl: function() {
var div = document.createElement('div');
div.setAttribute('id', this.options_.techId);
div.setAttribute('style', 'width:100%;height:100%');
var divWrapper = document.createElement('div');
divWrapper.setAttribute('style', 'width:100%;height:100%;position:relative');
divWrapper.appendChild(div);
if (!_isOnMobile && !this.options_.ytControls) {
var divBlocker = document.createElement('div');
divBlocker.setAttribute('class', 'vjs-iframe-blocker');
divBlocker.setAttribute('style', 'position:absolute;top:0;left:0;width:100%;height:100%');
// In case the blocker is still there and we want to pause
divBlocker.onclick = function() {
this.pause();
}.bind(this);
divWrapper.appendChild(divBlocker);
}
if (Youtube.isApiReady) {
this.initYTPlayer();
} else {
Youtube.apiReadyQueue.push(this);
}
return divWrapper;
},
initYTPlayer: function() {
var playerVars = {
controls: 0,
modestbranding: 1,
rel: 0,
showinfo: 0,
loop: this.options_.loop ? 1 : 0
};
// Let the user set any YouTube parameter
// https://developers.google.com/youtube/player_parameters?playerVersion=HTML5#Parameters
// To use YouTube controls, you must use ytControls instead
// To use the loop or autoplay, use the video.js settings
if (typeof this.options_.autohide !== 'undefined') {
playerVars.autohide = this.options_.autohide;
}
if (typeof this.options_['cc_load_policy'] !== 'undefined') {
playerVars['cc_load_policy'] = this.options_['cc_load_policy'];
}
if (typeof this.options_.ytControls !== 'undefined') {
playerVars.controls = this.options_.ytControls;
}
if (typeof this.options_.disablekb !== 'undefined') {
playerVars.disablekb = this.options_.disablekb;
}
if (typeof this.options_.end !== 'undefined') {
playerVars.end = this.options_.end;
}
if (typeof this.options_.fs !== 'undefined') {
playerVars.fs = this.options_.fs;
}
if (typeof this.options_.hl !== 'undefined') {
playerVars.hl = this.options_.hl;
} else if (typeof this.options_.language !== 'undefined') {
        // Set the YouTube player to the same language as video.js
playerVars.hl = this.options_.language.substr(0, 2);
}
if (typeof this.options_['iv_load_policy'] !== 'undefined') {
playerVars['iv_load_policy'] = this.options_['iv_load_policy'];
}
if (typeof this.options_.list !== 'undefined') {
playerVars.list = this.options_.list;
} else if (typeof this.url.listId !== 'undefined') {
playerVars.list = this.url.listId;
}
if (typeof this.options_.listType !== 'undefined') {
playerVars.listType = this.options_.listType;
}
if (typeof this.options_.modestbranding !== 'undefined') {
playerVars.modestbranding = this.options_.modestbranding;
}
if (typeof this.options_.playlist !== 'undefined') {
playerVars.playlist = this.options_.playlist;
}
if (typeof this.options_.playsinline !== 'undefined') {
playerVars.playsinline = this.options_.playsinline;
}
if (typeof this.options_.rel !== 'undefined') {
playerVars.rel = this.options_.rel;
}
if (typeof this.options_.showinfo !== 'undefined') {
playerVars.showinfo = this.options_.showinfo;
}
if (typeof this.options_.start !== 'undefined') {
playerVars.start = this.options_.start;
}
if (typeof this.options_.theme !== 'undefined') {
playerVars.theme = this.options_.theme;
}
this.activeVideoId = this.url.videoId;
this.activeList = playerVars.list;
this.ytPlayer = new YT.Player(this.options_.techId, {
videoId: this.url.videoId,
playerVars: playerVars,
events: {
onReady: this.onPlayerReady.bind(this),
onPlaybackQualityChange: this.onPlayerPlaybackQualityChange.bind(this),
onStateChange: this.onPlayerStateChange.bind(this),
onError: this.onPlayerError.bind(this)
}
});
},
onPlayerReady: function() {
this.triggerReady();
if (this.playOnReady) {
this.play();
}
},
onPlayerPlaybackQualityChange: function() {
},
onPlayerStateChange: function(e) {
var state = e.data;
if (state === this.lastState) {
return;
}
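      // Translate YouTube player state changes into the corresponding video.js events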
switch (state) {
case -1:
this.trigger('durationchange');
break;
case YT.PlayerState.ENDED:
this.trigger('ended');
break;
case YT.PlayerState.PLAYING:
this.trigger('timeupdate');
this.trigger('durationchange');
this.trigger('playing');
this.trigger('play');
if (this.isSeeking) {
this.trigger('seeked');
this.isSeeking = false;
}
break;
case YT.PlayerState.PAUSED:
if (this.isSeeking) {
this.trigger('seeked');
this.isSeeking = false;
this.ytPlayer.playVideo();
} else {
this.trigger('pause');
}
break;
case YT.PlayerState.BUFFERING:
this.player_.trigger('timeupdate');
this.player_.trigger('waiting');
break;
}
this.lastState = state;
},
onPlayerError: function(e) {
this.errorNumber = e.data;
this.trigger('error');
this.ytPlayer.stopVideo();
this.ytPlayer.destroy();
this.ytPlayer = null;
},
error: function() {
switch (this.errorNumber) {
case 2:
return { code: 'Unable to find the video' };
case 5:
return { code: 'Error while trying to play the video' };
case 100:
return { code: 'Unable to find the video' };
case 101:
case 150:
return { code: 'Playback on other Websites has been disabled by the video owner.' };
}
return { code: 'YouTube unknown error (' + this.errorNumber + ')' };
},
src: function() {
return this.source;
},
poster: function() {
return this.poster;
},
setPoster: function(poster) {
this.poster = poster;
},
setSrc: function(source) {
if (!source || !source.src) {
return;
}
this.source = source;
this.url = Youtube.parseUrl(source.src);
if (!this.options_.poster) {
Youtube.loadThumbnailUrl(this.url.videoId, function(poster) {
this.setPoster(poster);
this.trigger('posterchange');
}.bind(this));
}
if (this.options_.autoplay && !_isOnMobile) {
if (this.isReady_) {
this.play();
} else {
this.playOnReady = true;
}
}
},
play: function() {
if (!this.url || !this.url.videoId) {
return;
}
if (this.isReady_) {
if (this.url.listId) {
if (this.activeList === this.url.listId) {
this.ytPlayer.playVideo();
} else {
this.ytPlayer.loadPlaylist(this.url.listId);
this.activeList = this.url.listId;
}
        }
        if (this.activeVideoId === this.url.videoId) {
this.ytPlayer.playVideo();
} else {
this.ytPlayer.loadVideoById(this.url.videoId);
this.activeVideoId = this.url.videoId;
}
} else {
this.trigger('waiting');
this.playOnReady = true;
}
},
pause: function() {
if (this.ytPlayer) {
this.ytPlayer.pauseVideo();
}
},
paused: function() {
      return (this.ytPlayer) ?
(this.lastState !== YT.PlayerState.PLAYING && this.lastState !== YT.PlayerState.BUFFERING)
: true;
},
currentTime: function() {
return this.ytPlayer ? this.ytPlayer.getCurrentTime() : 0;
},
setCurrentTime: function(seconds) {
if (this.lastState === YT.PlayerState.PAUSED) {
this.timeBeforeSeek = this.currentTime();
}
this.timeBeforeSeek = this.currentTime();
this.ytPlayer.seekTo(seconds, true);
this.trigger('timeupdate');
this.trigger('seeking');
this.isSeeking = true;
// A seek event during pause does not return an event to trigger a seeked event,
// so run an interval timer to look for the currentTime to change
if (this.lastState === YT.PlayerState.PAUSED && this.timeBeforeSeek !== seconds) {
this.checkSeekedInPauseInterval = setInterval(function() {
if (this.lastState !== YT.PlayerState.PAUSED || !this.isSeeking) {
// If something changed while we were waiting for the currentTime to change,
// clear the interval timer
clearInterval(this.checkSeekedInPauseInterval);
} else if (this.currentTime() !== this.timeBeforeSeek) {
this.trigger('timeupdate');
this.trigger('seeked');
this.isSeeking = false;
clearInterval(this.checkSeekedInPauseInterval);
}
this.play();
}.bind(this), 250);
}
},
playbackRate: function() {
return this.ytPlayer ? this.ytPlayer.getPlaybackRate() : 1;
},
setPlaybackRate: function(suggestedRate) {
if (!this.ytPlayer) {
return;
}
this.ytPlayer.setPlaybackRate(suggestedRate);
this.trigger('ratechange');
},
duration: function() {
return this.ytPlayer ? this.ytPlayer.getDuration() : 0;
},
currentSrc: function() {
return this.source;
},
ended: function() {
return this.ytPlayer ? (this.lastState === YT.PlayerState.ENDED) : false;
},
volume: function() {
return this.ytPlayer ? this.ytPlayer.getVolume() / 100.0 : 1;
},
setVolume: function(percentAsDecimal) {
if (!this.ytPlayer) {
return;
}
this.ytPlayer.setVolume(percentAsDecimal * 100.0);
},
muted: function() {
return this.ytPlayer ? this.ytPlayer.isMuted() : false;
},
setMuted: function(mute) {
if (!this.ytPlayer) {
return;
}
if (mute) {
this.ytPlayer.mute();
} else {
this.ytPlayer.unMute();
}
},
buffered: function() {
if(!this.ytPlayer || !this.ytPlayer.getVideoLoadedFraction) {
return {
length: 0,
start: function() {
throw new Error('This TimeRanges object is empty');
},
end: function() {
throw new Error('This TimeRanges object is empty');
}
};
}
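      // The iframe API only exposes a single loaded fraction, so report it as one buffered range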
var end = this.ytPlayer.getVideoLoadedFraction() * this.ytPlayer.getDuration();
return {
length: 1,
start: function() { return 0; },
end: function() { return end; }
};
},
supportsFullScreen: function() {
if (typeof this.el_.webkitEnterFullScreen === 'function') {
// Seems to be broken in Chromium/Chrome && Safari in Leopard
if (/Android/.test(videojs.USER_AGENT) || !/Chrome|Mac OS X 10.5/.test(videojs.USER_AGENT)) {
return true;
}
}
return false;
}
});
Youtube.isSupported = function() {
return true;
};
Youtube.canPlaySource = function(e) {
return (e.type === 'video/youtube');
};
var _isOnMobile = /(iPad|iPhone|iPod|Android)/g.test(navigator.userAgent);
Youtube.parseUrl = function(url) {
var result = {
videoId: null
};
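    // Capture the 11-character video id from the common YouTube URL formats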
var regex = /^.*(youtu.be\/|v\/|u\/\w\/|embed\/|watch\?v=|\&v=)([^#\&\?]*).*/;
var match = url.match(regex);
if (match && match[2].length === 11) {
result.videoId = match[2];
}
var regPlaylist = /[?&]list=([^#\&\?]+)/;
match = url.match(regPlaylist);
if(match && match[1]) {
result.listId = match[1];
}
return result;
};
// Tries to get the highest resolution thumbnail available for the video
Youtube.loadThumbnailUrl = function(id, callback){
var uri = 'https://img.youtube.com/vi/' + id + '/maxresdefault.jpg';
var fallback = 'https://img.youtube.com/vi/' + id + '/0.jpg';
try {
var image = new Image();
image.onload = function(){
// Onload may still be called if YouTube returns the 120x90 error thumbnail
if('naturalHeight' in this){
if(this.naturalHeight <= 90 || this.naturalWidth <= 120) {
this.onerror();
return;
}
} else if(this.height <= 90 || this.width <= 120) {
this.onerror();
return;
}
callback(uri);
};
image.onerror = function(){
callback(fallback);
};
image.src = uri;
}
catch(e){ callback(fallback); }
};
function loadApi() {
var tag = document.createElement('script');
tag.src = 'https://www.youtube.com/iframe_api';
var firstScriptTag = document.getElementsByTagName('script')[0];
firstScriptTag.parentNode.insertBefore(tag, firstScriptTag);
}
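  // CSS for the click blocker: hidden by default, displayed while the player is user-inactive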
function injectCss() {
var css = '.vjs-iframe-blocker { display: none; }' +
'.vjs-user-inactive .vjs-iframe-blocker { display: block; }';
var head = document.head || document.getElementsByTagName('head')[0];
var style = document.createElement('style');
style.type = 'text/css';
if (style.styleSheet){
style.styleSheet.cssText = css;
} else {
style.appendChild(document.createTextNode(css));
}
head.appendChild(style);
}
Youtube.apiReadyQueue = [];
window.onYouTubeIframeAPIReady = function() {
Youtube.isApiReady = true;
for (var i = 0; i < Youtube.apiReadyQueue.length; ++i) {
Youtube.apiReadyQueue[i].initYTPlayer();
}
};
loadApi();
injectCss();
videojs.registerComponent('Youtube', Youtube);
})();
| Added the color option to the player parameters.
| src/Youtube.js | Added the color option to the player parameters. | <ide><path>rc/Youtube.js
<ide>
<ide> if (typeof this.options_.end !== 'undefined') {
<ide> playerVars.end = this.options_.end;
<add> }
<add>
<add> if (typeof this.options_.color !== 'undefined') {
<add> playerVars.color = this.options_.color;
<ide> }
<ide>
<ide> if (typeof this.options_.fs !== 'undefined') { |
|
Java | apache-2.0 | 21f748f41c5c51499378e17a647c03806f95d7e4 | 0 | sacjaya/siddhi-3 | /*
* Copyright (c) 2005 - 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.siddhi.core.event;
/**
* This is the event interface exposed to the user.
*/
public interface Event {
/**
* This should return the timestamp associated with the event.
*
* @return
*/
long getTimeStamp();
/**
* This should return the object[] which holds attribute values of the event
*
* @return
*/
Object[] getData();
/**
* This should return object in the ith position of attribute array.
*
* @param i array index
* @return
*/
Object getData(int i);
/**
* This should return whether the message is expired
*
* @return
*/
Boolean isExpired();
}
| modules/siddhi-core/src/main/java/org/wso2/siddhi/core/event/Event.java | /*
* Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.siddhi.core.event;
import java.util.Arrays;
public class Event implements StreamEvent{
private long timeStamp = -1;
private Object[] data = new Object[1];
public Event( long timeStamp, Object[] data) {
this.timeStamp = timeStamp;
this.data = data;
}
public Event(){
}
public Event(int dataSize){
this.data = new Object[dataSize];
}
public long getTimeStamp() {
return timeStamp;
}
public Object[] getData() {
return data;
}
public Object getData(int i) {
return data[i];
}
@Override
public String toString() {
return "Event{" +
" timeStamp=" + timeStamp +
", data=" + (data == null ? null : Arrays.asList(data)) +
'}';
}
@Override
public Event[] toArray() {
return new Event[]{this};
}
public void setData(Object[] data) {
this.data =data;
}
public void setTimeStamp(long timeStamp) {
this.timeStamp = timeStamp;
}
}
| commiting event interface
| modules/siddhi-core/src/main/java/org/wso2/siddhi/core/event/Event.java | commiting event interface | <ide><path>odules/siddhi-core/src/main/java/org/wso2/siddhi/core/event/Event.java
<ide> /*
<del>* Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
<del>*
<del>* WSO2 Inc. licenses this file to you under the Apache License,
<del>* Version 2.0 (the "License"); you may not use this file except
<del>* in compliance with the License.
<del>* You may obtain a copy of the License at
<del>*
<del>* http://www.apache.org/licenses/LICENSE-2.0
<del>*
<del>* Unless required by applicable law or agreed to in writing,
<del>* software distributed under the License is distributed on an
<del>* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
<del>* KIND, either express or implied. See the License for the
<del>* specific language governing permissions and limitations
<del>* under the License.
<del>*/
<add> * Copyright (c) 2005 - 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License. You may obtain a copy of the License at
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<ide> package org.wso2.siddhi.core.event;
<ide>
<del>import java.util.Arrays;
<add>/**
<add> * This is the event interface exposed to the user.
<add> */
<add>public interface Event {
<add> /**
<add> * This should return the timestamp associated with the event.
<add> *
<add> * @return
<add> */
<add> long getTimeStamp();
<ide>
<del>public class Event implements StreamEvent{
<add> /**
<add> * This should return the object[] which holds attribute values of the event
<add> *
<add> * @return
<add> */
<add> Object[] getData();
<ide>
<del> private long timeStamp = -1;
<del> private Object[] data = new Object[1];
<add> /**
<add> * This should return object in the ith position of attribute array.
<add> *
<add> * @param i array index
<add> * @return
<add> */
<add> Object getData(int i);
<ide>
<del> public Event( long timeStamp, Object[] data) {
<del> this.timeStamp = timeStamp;
<del> this.data = data;
<del> }
<del>
<del> public Event(){
<del> }
<del>
<del> public Event(int dataSize){
<del> this.data = new Object[dataSize];
<del> }
<del>
<del>
<del> public long getTimeStamp() {
<del> return timeStamp;
<del> }
<del>
<del> public Object[] getData() {
<del> return data;
<del> }
<del>
<del> public Object getData(int i) {
<del> return data[i];
<del> }
<del>
<del> @Override
<del> public String toString() {
<del> return "Event{" +
<del> " timeStamp=" + timeStamp +
<del> ", data=" + (data == null ? null : Arrays.asList(data)) +
<del> '}';
<del> }
<del>
<del> @Override
<del> public Event[] toArray() {
<del> return new Event[]{this};
<del> }
<del>
<del> public void setData(Object[] data) {
<del> this.data =data;
<del> }
<del>
<del> public void setTimeStamp(long timeStamp) {
<del> this.timeStamp = timeStamp;
<del> }
<del>
<del>
<add> /**
<add> * This should return whether the message is expired
<add> *
<add> * @return
<add> */
<add> Boolean isExpired();
<ide> } |
|
Java | bsd-3-clause | error: pathspec 'core/src/sharedTest/java/org/hisp/dhis/android/core/data/datavalue/DataValueUtils.java' did not match any file(s) known to git
| 1958ec382189669261603c239cd614a01a6c0b8b | 1 | dhis2/dhis2-android-sdk,dhis2/dhis2-android-sdk,dhis2/dhis2-android-sdk | /*
* Copyright (c) 2017, University of Oslo
*
* All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.android.core.data.datavalue;
import java.util.HashSet;
import java.util.Set;
/**
* A collection of uids sets for dataValueCalls
*/
public final class DataValueUtils {
public final static Set<String> dataSetUids = new HashSet<>();
public final static Set<String> periodIds = new HashSet<>();
public final static Set<String> orgUnitUids = new HashSet<>();
DataValueUtils () {
dataSetUids.add("BfMAe6Itzgt");
dataSetUids.add("TuL8IOPzpHh");
periodIds.add("201712");
periodIds.add("2017");
orgUnitUids.add("DiszpKrYNg8");
}
} | core/src/sharedTest/java/org/hisp/dhis/android/core/data/datavalue/DataValueUtils.java | datavalues: add DataValueUtils for tests
| core/src/sharedTest/java/org/hisp/dhis/android/core/data/datavalue/DataValueUtils.java | datavalues: add DataValueUtils for tests | <ide><path>ore/src/sharedTest/java/org/hisp/dhis/android/core/data/datavalue/DataValueUtils.java
<add>/*
<add> * Copyright (c) 2017, University of Oslo
<add> *
<add> * All rights reserved.
<add> * Redistribution and use in source and binary forms, with or without
<add> * modification, are permitted provided that the following conditions are met:
<add> * Redistributions of source code must retain the above copyright notice, this
<add> * list of conditions and the following disclaimer.
<add> *
<add> * Redistributions in binary form must reproduce the above copyright notice,
<add> * this list of conditions and the following disclaimer in the documentation
<add> * and/or other materials provided with the distribution.
<add> * Neither the name of the HISP project nor the names of its contributors may
<add> * be used to endorse or promote products derived from this software without
<add> * specific prior written permission.
<add> *
<add> * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
<add> * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
<add> * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
<add> * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
<add> * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
<add> * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
<add> * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
<add> * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
<add> * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
<add> * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
<add> */
<add>
<add>package org.hisp.dhis.android.core.data.datavalue;
<add>
<add>import java.util.HashSet;
<add>import java.util.Set;
<add>
<add>/**
<add> * A collection of uids sets for dataValueCalls
<add> */
<add>public final class DataValueUtils {
<add>
<add> public final static Set<String> dataSetUids = new HashSet<>();
<add> public final static Set<String> periodIds = new HashSet<>();
<add> public final static Set<String> orgUnitUids = new HashSet<>();
<add>
<add> DataValueUtils () {
<add> dataSetUids.add("BfMAe6Itzgt");
<add> dataSetUids.add("TuL8IOPzpHh");
<add>
<add> periodIds.add("201712");
<add> periodIds.add("2017");
<add>
<add> orgUnitUids.add("DiszpKrYNg8");
<add> }
<add>} |
|
Java | mit | 2e6745c40e33f500080e09f67ef2e7a70a8c53ba | 0 | MrPudin/Skeem,MrPudin/Skeem,MrPudin/Skeem | package sstinc.skeem;
import org.joda.time.Period;
import java.util.ArrayList;
/**
* This class handles the addition and removal of a task within a timeblock,
* the time between two voidblocks. This class extends {@link Schedulable}.
*
* @see Task
* @see Voidblock
* @see Schedulable
*/
public class Timeblock extends Schedulable {
private ArrayList<Task> tasks_scheduled;
private Period period_used;
private Period period_left;
/**
* Default constructor. Instantiates all variables to empty values.
*/
public Timeblock() {
super();
this.tasks_scheduled = new ArrayList<>();
this.period_used = new Period();
this.period_left = new Period();
}
/**
* Datetime constructor. Sets the start and stop time of the timeblock.
* This start and stop time should be the stop time of the previous
* voidblock and the start time of the next voidblock.
*
* @param scheduled_start timeblock's start time
* @param scheduled_stop timeblock's stop time
*/
public Timeblock(Datetime scheduled_start, Datetime scheduled_stop) {
super();
this.scheduled_start = scheduled_start;
this.scheduled_stop = scheduled_stop;
this.tasks_scheduled = new ArrayList<>();
// Calculate duration
this.period_used = new Period();
this.period_left = new Period(this.getScheduledPeriod());
}
// Getters and Setters
/**
* Gets the list of scheduled tasks within the timeblock.
* @return array list of tasks scheduled
*/
public ArrayList<Task> getTasksScheduled() {
return this.tasks_scheduled;
}
/**
* Gets the period used by the tasks in the timeblock
* @return timeblock's used period
*/
public Period getPeriodUsed() {
return this.period_used;
}
/**
* Gets the unused period of the timeblock by the tasks.
* @return timeblock's available period
*/
public Period getPeriodLeft() {
return this.period_left;
}
/**
* Sets the scheduled start datetime and recalculates the total period
* and period left if there is a set scheduled stop.
*
* @param datetime new scheduled start datetime to set
*/
@Override
public void setScheduledStart(Datetime datetime) {
this.scheduled_start = datetime;
// Get the new periods
// If the scheduled stop is set
if (this.scheduled_stop.getMillis() != new Period().getMillis()) {
// Recalculate the period and period left
this.period_left = this.getScheduledPeriod().minus(this.period_used);
}
}
/**
* Sets the scheduled stop datetime and recalculates the total period and
* period left if there is a set scheduled start.
*
* @param datetime new scheduled stop datetime to set
*/
@Override
public void setScheduledStop(Datetime datetime) {
this.scheduled_stop = datetime;
// Get the new periods
// If the scheduled start is set
if (this.scheduled_start.getMillis() != new Period().getMillis()) {
// Recalculate the period and period left
this.period_left = this.getScheduledPeriod().minus(this.period_used);
}
}
/**
     * Adds a new task to the timeblock. Recalculates the time used and time
     * left in the timeblock and sets the scheduled start and stop of the task.
     *
     * @param task task to add
     * @return true if addition successful. false if there is not enough time
     * left in the timeblock for the task to be added.
*/
public boolean addTask(Task task) {
// Check if there is enough time left for the task
if (this.period_left.getMillis() < task.getScheduledPeriod().getMillis()) {
return false;
}
// Add to tasks_scheduled
this.tasks_scheduled.add(task);
// Recalculate time used and time left
this.period_left = this.period_left.minus(task.getScheduledPeriod());
this.period_used = this.period_used.plus(task.getScheduledPeriod());
// Set the new scheduled start and stop
task.setScheduledStart(this.scheduled_start.add(this.period_left));
task.setScheduledStop(this.scheduled_start.add(this.period_left).add(
task.getScheduledPeriod()));
return true;
}
/**
* Removes a task from the tasks scheduled. Recalculates the time used
* and time left in the timeblock.
*
* @param task task to remove
*/
public void removeTask(Task task) {
// Returns if the task is not in the tasks scheduled
if (!this.tasks_scheduled.contains(task)) {
return;
}
// Remove task from tasks scheduled
this.tasks_scheduled.remove(task);
// Recalculate time used and time left
this.period_left = this.period_left.plus(task.getPeriodNeeded());
this.period_used = this.period_used.minus(task.getPeriodNeeded());
}
}
| Android/Skeem/app/src/main/java/sstinc/skeem/Timeblock.java | package sstinc.skeem;
import org.joda.time.Period;
import java.util.ArrayList;
/**
* This class handles the addition and removal of a task within a timeblock,
* the time between two voidblocks. This class extends {@link Schedulable}.
*
* @see Task
* @see Voidblock
* @see Schedulable
*/
class Timeblock extends Schedulable {
private ArrayList<Task> tasks_scheduled;
private Period period_used;
private Period period_left;
/**
* Default constructor. Instantiates all variables to empty values.
*/
Timeblock() {
super();
this.tasks_scheduled = new ArrayList<>();
this.period_used = new Period();
this.period_left = new Period();
}
/**
* Datetime constructor. Sets the start and stop time of the timeblock.
* This start and stop time should be the stop time of the previous
* voidblock and the start time of the next voidblock.
*
* @param scheduled_start timeblock's start time
* @param scheduled_stop timeblock's stop time
*/
Timeblock(Datetime scheduled_start, Datetime scheduled_stop) {
super();
this.scheduled_start = scheduled_start;
this.scheduled_stop = scheduled_stop;
this.tasks_scheduled = new ArrayList<>();
// Calculate duration
this.period_used = new Period();
this.period_left = new Period(this.getScheduledPeriod());
}
// Getters and Setters
/**
* Gets the list of scheduled tasks within the timeblock.
* @return array list of tasks scheduled
*/
ArrayList<Task> getTasksScheduled() {
return this.tasks_scheduled;
}
/**
* Gets the period used by the tasks in the timeblock
* @return timeblock's used period
*/
Period getPeriodUsed() {
return this.period_used;
}
/**
* Gets the unused period of the timeblock by the tasks.
* @return timeblock's available period
*/
Period getPeriodLeft() {
return this.period_left;
}
/**
* Sets the scheduled start datetime and recalculates the total period
* and period left if there is a set scheduled stop.
*
* @param datetime new scheduled start datetime to set
*/
@Override
void setScheduledStart(Datetime datetime) {
this.scheduled_start = datetime;
// Get the new periods
// If the scheduled stop is set
if (this.scheduled_stop.getMillis() != new Period().getMillis()) {
// Recalculate the period and period left
this.period_left = this.getScheduledPeriod().minus(this.period_used);
}
}
/**
* Sets the scheduled stop datetime and recalculates the total period and
* period left if there is a set scheduled start.
*
* @param datetime new scheduled stop datetime to set
*/
@Override
void setScheduledStop(Datetime datetime) {
this.scheduled_stop = datetime;
// Get the new periods
// If the scheduled start is set
if (this.scheduled_start.getMillis() != new Period().getMillis()) {
// Recalculate the period and period left
this.period_left = this.getScheduledPeriod().minus(this.period_used);
}
}
/**
     * Adds a new task to the timeblock. Recalculates the time used and time
     * left in the timeblock and sets the scheduled start and stop of the task.
     *
     * @param task task to add
     * @return true if addition successful. false if there is not enough time
     * left in the timeblock for the task to be added.
*/
boolean addTask(Task task) {
// Check if there is enough time left for the task
if (this.period_left.getMillis() < task.getScheduledPeriod().getMillis()) {
return false;
}
// Add to tasks_scheduled
this.tasks_scheduled.add(task);
// Recalculate time used and time left
this.period_left = this.period_left.minus(task.getScheduledPeriod());
this.period_used = this.period_used.plus(task.getScheduledPeriod());
// Set the new scheduled start and stop
task.setScheduledStart(this.scheduled_start.add(this.period_left));
task.setScheduledStop(this.scheduled_start.add(this.period_left).add(
task.getScheduledPeriod()));
return true;
}
/**
* Removes a task from the tasks scheduled. Recalculates the time used
* and time left in the timeblock.
*
* @param task task to remove
*/
void removeTask(Task task) {
// Returns if the task is not in the tasks scheduled
if (!this.tasks_scheduled.contains(task)) {
return;
}
// Remove task from tasks scheduled
this.tasks_scheduled.remove(task);
// Recalculate time used and time left
this.period_left = this.period_left.plus(task.getPeriodNeeded());
this.period_used = this.period_used.minus(task.getPeriodNeeded());
}
}
| Made Timeblock's methods and constructors public
| Android/Skeem/app/src/main/java/sstinc/skeem/Timeblock.java | Made Timeblock's methods and constructors public | <ide><path>ndroid/Skeem/app/src/main/java/sstinc/skeem/Timeblock.java
<ide> * @see Voidblock
<ide> * @see Schedulable
<ide> */
<del>class Timeblock extends Schedulable {
<add>public class Timeblock extends Schedulable {
<ide> private ArrayList<Task> tasks_scheduled;
<ide> private Period period_used;
<ide> private Period period_left;
<ide> /**
<ide> * Default constructor. Instantiates all variables to empty values.
<ide> */
<del> Timeblock() {
<add> public Timeblock() {
<ide> super();
<ide> this.tasks_scheduled = new ArrayList<>();
<ide> this.period_used = new Period();
<ide> * @param scheduled_start timeblock's start time
<ide> * @param scheduled_stop timeblock's stop time
<ide> */
<del> Timeblock(Datetime scheduled_start, Datetime scheduled_stop) {
<add> public Timeblock(Datetime scheduled_start, Datetime scheduled_stop) {
<ide> super();
<ide> this.scheduled_start = scheduled_start;
<ide> this.scheduled_stop = scheduled_stop;
<ide> * Gets the list of scheduled tasks within the timeblock.
<ide> * @return array list of tasks scheduled
<ide> */
<del> ArrayList<Task> getTasksScheduled() {
<add> public ArrayList<Task> getTasksScheduled() {
<ide> return this.tasks_scheduled;
<ide> }
<ide>
<ide> * Gets the period used by the tasks in the timeblock
<ide> * @return timeblock's used period
<ide> */
<del> Period getPeriodUsed() {
<add> public Period getPeriodUsed() {
<ide> return this.period_used;
<ide> }
<ide> /**
<ide> * Gets the unused period of the timeblock by the tasks.
<ide> * @return timeblock's available period
<ide> */
<del> Period getPeriodLeft() {
<add> public Period getPeriodLeft() {
<ide> return this.period_left;
<ide> }
<ide>
<ide> * @param datetime new scheduled start datetime to set
<ide> */
<ide> @Override
<del> void setScheduledStart(Datetime datetime) {
<add> public void setScheduledStart(Datetime datetime) {
<ide> this.scheduled_start = datetime;
<ide> // Get the new periods
<ide> // If the scheduled stop is set
<ide> * @param datetime new scheduled stop datetime to set
<ide> */
<ide> @Override
<del> void setScheduledStop(Datetime datetime) {
<add> public void setScheduledStop(Datetime datetime) {
<ide> this.scheduled_stop = datetime;
<ide> // Get the new periods
<ide> // If the scheduled start is set
<ide> * @return true if addition successful. false if there is not enough time
<ide>     * left in the timeblock for the task to be added.
<ide> */
<del> boolean addTask(Task task) {
<add> public boolean addTask(Task task) {
<ide> // Check if there is enough time left for the task
<ide> if (this.period_left.getMillis() < task.getScheduledPeriod().getMillis()) {
<ide> return false;
<ide> *
<ide> * @param task task to remove
<ide> */
<del> void removeTask(Task task) {
<add> public void removeTask(Task task) {
<ide> // Returns if the task is not in the tasks scheduled
<ide> if (!this.tasks_scheduled.contains(task)) {
<ide> return; |