diff
stringlengths
262
553k
is_single_chunk
bool
2 classes
is_single_function
bool
1 class
buggy_function
stringlengths
20
391k
fixed_function
stringlengths
0
392k
diff --git a/backend/manager/modules/bll/src/main/java/org/ovirt/engine/core/bll/SearchQuery.java b/backend/manager/modules/bll/src/main/java/org/ovirt/engine/core/bll/SearchQuery.java index d3c975918..90ce6ea79 100644 --- a/backend/manager/modules/bll/src/main/java/org/ovirt/engine/core/bll/SearchQuery.java +++ b/backend/manager/modules/bll/src/main/java/org/ovirt/engine/core/bll/SearchQuery.java @@ -1,384 +1,384 @@ package org.ovirt.engine.core.bll; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import org.ovirt.engine.core.bll.adbroker.AdActionType; import org.ovirt.engine.core.bll.adbroker.LdapBroker; import org.ovirt.engine.core.bll.adbroker.LdapBrokerUtils; import org.ovirt.engine.core.bll.adbroker.LdapFactory; import org.ovirt.engine.core.bll.adbroker.LdapQueryData; import org.ovirt.engine.core.bll.adbroker.LdapQueryDataImpl; import org.ovirt.engine.core.bll.adbroker.LdapQueryType; import org.ovirt.engine.core.bll.adbroker.LdapSearchByQueryParameters; import org.ovirt.engine.core.common.businessentities.AdUser; import org.ovirt.engine.core.common.businessentities.AuditLog; import org.ovirt.engine.core.common.businessentities.DbUser; import org.ovirt.engine.core.common.businessentities.Disk; import org.ovirt.engine.core.common.businessentities.IVdcQueryable; import org.ovirt.engine.core.common.businessentities.Quota; import org.ovirt.engine.core.common.businessentities.VDS; import org.ovirt.engine.core.common.businessentities.VDSGroup; import org.ovirt.engine.core.common.businessentities.VM; import org.ovirt.engine.core.common.businessentities.VmTemplate; import org.ovirt.engine.core.common.businessentities.ad_groups; import org.ovirt.engine.core.common.businessentities.storage_domains; import org.ovirt.engine.core.common.businessentities.storage_pool; import org.ovirt.engine.core.common.businessentities.vm_pools; import org.ovirt.engine.core.common.businessentities.gluster.GlusterVolumeEntity; import 
org.ovirt.engine.core.common.config.Config; import org.ovirt.engine.core.common.config.ConfigValues; import org.ovirt.engine.core.common.errors.SearchEngineIllegalCharacterException; import org.ovirt.engine.core.common.errors.SqlInjectionException; import org.ovirt.engine.core.common.queries.SearchParameters; import org.ovirt.engine.core.common.queries.SearchReturnValue; import org.ovirt.engine.core.common.utils.ListUtils; import org.ovirt.engine.core.common.utils.ListUtils.Filter; import org.ovirt.engine.core.compat.DateTime; import org.ovirt.engine.core.compat.StringHelper; import org.ovirt.engine.core.compat.TimeSpan; import org.ovirt.engine.core.dao.SearchDAO; import org.ovirt.engine.core.searchbackend.ISyntaxChecker; import org.ovirt.engine.core.searchbackend.SearchObjects; import org.ovirt.engine.core.searchbackend.SyntaxCheckerFactory; import org.ovirt.engine.core.searchbackend.SyntaxContainer; import org.ovirt.engine.core.searchbackend.SyntaxError; public class SearchQuery<P extends SearchParameters> extends QueriesCommandBase<P> { private static final HashMap<String, QueryData2> mQueriesCache = new HashMap<String, QueryData2>(); public SearchQuery(P parameters) { super(parameters); } @Override protected SearchReturnValue createReturnValue() { return new SearchReturnValue(); } @Override public SearchReturnValue getQueryReturnValue() { return (SearchReturnValue) super.getQueryReturnValue(); } @Override protected void ProceedOnFail() { getQueryReturnValue().setIsSearchValid(false); } @Override protected void executeQueryCommand() { List<? 
extends IVdcQueryable> returnValue = new ArrayList<IVdcQueryable>(); switch (getParameters().getSearchTypeValue()) { case VM: { returnValue = searchVmsFromDb(); break; } case AdGroup: { returnValue = searchAdGroups(); break; } case AdUser: { returnValue = searchAdUsers(); break; } case AuditLog: { returnValue = searchAuditLogEvents(); break; } case DBUser: { returnValue = searchDbUsers(); break; } case VDS: { returnValue = searchVDSsByDb(); break; } case VmTemplate: { returnValue = searchVMTemplates(); break; } case VmPools: { returnValue = searchVmPools(); break; } case Cluster: { returnValue = searchClusters(); break; } case StoragePool: { returnValue = searchStoragePool(); break; } case StorageDomain: { returnValue = searchStorageDomain(); break; } case Quota: { returnValue = searchQuota(); break; } case Disk: { returnValue = searchDisk(); break; } case GlusterVolume: { returnValue = searchGlusterVolumes(); break; } default: { log.errorFormat("Search object type not handled: {0}", getParameters().getSearchTypeValue()); break; } } getQueryReturnValue().setReturnValue(returnValue); } private List<VM> searchVmsFromDb() { List<VM> returnValue = null; QueryData2 data = InitQueryData(true); if (data == null) { returnValue = new ArrayList<VM>(); getQueryReturnValue().setExceptionString(getQueryReturnValue().getExceptionString()); } else { returnValue = getDbFacade().getVmDAO().getAllUsingQuery(data.getQuery()); for (VM vm : returnValue) { VmHandler.UpdateVmGuestAgentVersion(vm); } } return returnValue; } private List<VDS> searchVDSsByDb() { return genericSearch(getDbFacade().getVdsDAO(), true, new Filter<VDS>() { @Override public List<VDS> filter(List<VDS> data) { for (VDS vds : data) { vds.setCpuName(CpuFlagsManagerHandler.FindMaxServerCpuByFlags(vds.getcpu_flags(), vds.getvds_group_compatibility_version())); } return data; } }); } private List<AdUser> searchAdUsers() { return adSearch(LdapQueryType.searchUsers, AdActionType.SearchUserByQuery); } private 
List<ad_groups> searchAdGroups() { return adSearch(LdapQueryType.searchGroups, AdActionType.SearchGroupsByQuery); } /** * Performs an ldap query * @param ldapQueryType The type of query to run * @param adActionType The action to submit to the LdapBroker * @return The result of the query */ private <T extends IVdcQueryable> List<T> adSearch(LdapQueryType ldapQueryType, AdActionType adActionType) { QueryData2 data = InitQueryData(true); if (data == null) { return new ArrayList<T>(); } LdapQueryData ldapQueryData = new LdapQueryDataImpl(); ldapQueryData.setLdapQueryType(ldapQueryType); ldapQueryData.setDomain(data.getDomain()); ldapQueryData.setFilterParameters(new Object[] { data.getQueryForAdBroker() }); @SuppressWarnings("unchecked") List<T> result = (List<T>) getLdapFactory(data.getDomain()) .RunAdAction(adActionType, new LdapSearchByQueryParameters(data.getDomain(), ldapQueryData)) .getReturnValue(); return (result != null) ? result : new ArrayList<T>(); } private List<DbUser> searchDbUsers() { return genericSearch(getDbFacade().getDbUserDAO(), true, null); } private List<VmTemplate> searchVMTemplates() { return genericSearch(getDbFacade().getVmTemplateDAO(), true, new Filter<VmTemplate>() { @Override public List<VmTemplate> filter(final List<VmTemplate> data) { for (IVdcQueryable vmt_helper : data) { VmTemplate vmt = (VmTemplate) vmt_helper; VmTemplateHandler.UpdateDisksFromDb(vmt); } return data; } }); } private final <T extends IVdcQueryable> List<T> genericSearch(final SearchDAO<T> dao, final boolean useCache, final Filter<T> filter) { final QueryData2 data = InitQueryData(useCache); if (data == null) { return new ArrayList<T>(); } return ListUtils.filter(dao.getAllWithQuery(data.getQuery()), filter); } private List<AuditLog> searchAuditLogEvents() { return genericSearch(getDbFacade().getAuditLogDAO(), false, null); } private List<vm_pools> searchVmPools() { return genericSearch(getDbFacade().getVmPoolDAO(), true, null); } private List<VDSGroup> 
searchClusters() { return genericSearch(getDbFacade().getVdsGroupDAO(), true, null); } private List<storage_pool> searchStoragePool() { return genericSearch(getDbFacade().getStoragePoolDAO(), true, null); } private List<storage_domains> searchStorageDomain() { return genericSearch(getDbFacade().getStorageDomainDAO(), true, null); } private List<Quota> searchQuota() { return genericSearch(getDbFacade().getQuotaDAO(), true, null); } private List<Disk> searchDisk() { return genericSearch(getDbFacade().getDiskDao(), true, null); } private List<GlusterVolumeEntity> searchGlusterVolumes() { return genericSearch(getDbFacade().getGlusterVolumeDao(), true, null); } private QueryData2 InitQueryData(boolean useCache) { QueryData2 data = null; boolean isExistsValue = false; boolean IsFromYesterday = false; boolean isSafe = false; String searchKey = ""; try { String searchText = getParameters().getSearchPattern(); // find if this is a trivial search expression (like 'Vms:' etc). isSafe = SearchObjects.isSafeExpression(searchText); getQueryReturnValue().setIsSearchValid(true); if (useCache) { // first lets check the cache of queries. - searchKey = String.format("%1$s,%2$s", searchText, getParameters().getMaxCount()); + searchKey = String.format("%1$s,%2$s,%3$s", searchText, getParameters().getMaxCount(),getParameters().getCaseSensitive()); data = mQueriesCache.get(searchKey); isExistsValue = (data != null); if (isExistsValue) { TimeSpan span = DateTime.getNow().Subtract(data.getDate()); if (span.Days >= 1) { IsFromYesterday = true; } } } // query not in cache or the cached entry is too old, process the // search text. 
if (!isExistsValue || IsFromYesterday) { log.debugFormat("ResourceManager::searchBusinessObjects(''{0}'') - entered", searchText); data = new QueryData2(); data.setPreQueryCommand(getDbFacade().getDbEngineDialect().getPreSearchQueryCommand()); ISyntaxChecker curSyntaxChecker; String[] splitted = searchText.split("[:@ ]"); if ((StringHelper.EqOp(splitted[0].toUpperCase(), SearchObjects.AD_USER_OBJ_NAME)) || (StringHelper.EqOp(splitted[0].toUpperCase(), SearchObjects.AD_USER_PLU_OBJ_NAME)) || (StringHelper.EqOp(splitted[0].toUpperCase(), SearchObjects.AD_GROUP_OBJ_NAME)) || (StringHelper.EqOp(splitted[0].toUpperCase(), SearchObjects.AD_GROUP_PLU_OBJ_NAME))) { if (searchText.indexOf('@') > 0 && splitted.length > 1) { data.setDomain(splitted[1]); searchText = searchText.substring(0, searchText.indexOf('@')) + searchText.substring(searchText.indexOf(':')); } else { String domain = getDefaultDomain(); data.setDomain(domain); } curSyntaxChecker = SyntaxCheckerFactory.CreateADSyntaxChecker(Config .<String> GetValue(ConfigValues.AuthenticationMethod)); } else { curSyntaxChecker = SyntaxCheckerFactory .CreateBackendSyntaxChecker(Config.<String> GetValue(ConfigValues.AuthenticationMethod)); } SyntaxContainer searchObj = curSyntaxChecker.analyzeSyntaxState(searchText, true); // set the case-sensitive flag searchObj.setCaseSensitive(getParameters().getCaseSensitive()); searchObj.setMaxCount(getParameters().getMaxCount() == -1 ? 
Config .<Integer> GetValue(ConfigValues.SearchResultsLimit) : getParameters().getMaxCount()); // setting FromSearch value searchObj.setSearchFrom(getParameters().getSearchFrom()); if (searchObj.getError() != SyntaxError.NO_ERROR) { getQueryReturnValue().setIsSearchValid(false); log.warnFormat("ResourceManager::searchBusinessObjects - erroneous search text - ''{0}''", searchText); int startPos = searchObj.getErrorStartPos(); int endPos = searchObj.getErrorEndPos(); int length = endPos - startPos; String error = (length > 0 && ((startPos + 1 + length) < searchText.length()) && (endPos + 1 < searchText.length())) ? searchText.substring(0, startPos) + "$" + searchText.substring(startPos + 1, startPos + 1 + length) + "$" + searchText.substring(endPos + 1) : searchObj.getError().toString(); getQueryReturnValue().setExceptionString(error); return null; } if (searchObj.getvalid() != true) { getQueryReturnValue().setIsSearchValid(false); log.warnFormat("ResourceManager::searchBusinessObjects - Invalid search text - ''{0}''", searchText); return null; } // An expression is considered safe if matches a trivial search. data.setQType(searchObj.getSearchObjectStr()); data.setQuery(curSyntaxChecker.generateQueryFromSyntaxContainer(searchObj, isSafe)); data.setDate(new Date()); // when looking for tags , the query contains all parent children tag id's // statically, therefore , in order to reflect changes in the parent tree // we should not rely on the cached query in such case and have to build the // query from scratch. 
if (!containsStaticInValues(data.getQuery())) mQueriesCache.put(searchKey, data); } } catch (SearchEngineIllegalCharacterException e) { log.error("Search expression can not end with ESCAPE character:" + getParameters().getSearchPattern()); data = null; getQueryReturnValue().setIsSearchValid(false); } catch (SqlInjectionException e) { log.error("Sql Injection in search: " + getParameters().getSearchPattern()); data = null; getQueryReturnValue().setIsSearchValid(false); } catch (RuntimeException ex) { log.warn("Illegal search: " + getParameters().getSearchPattern(), ex); data = null; getQueryReturnValue().setIsSearchValid(false); } return data; } protected String getDefaultDomain() { return LdapBrokerUtils.getDomainsList().get(0); } protected LdapBroker getLdapFactory(String domain) { return LdapFactory.getInstance(domain); } private static boolean containsStaticInValues(String query) { final String MATCH_IN_TAG_ID_CLAUSE = "with_tags.tag_id in"; return query.toLowerCase().contains(MATCH_IN_TAG_ID_CLAUSE); } }
true
true
private QueryData2 InitQueryData(boolean useCache) { QueryData2 data = null; boolean isExistsValue = false; boolean IsFromYesterday = false; boolean isSafe = false; String searchKey = ""; try { String searchText = getParameters().getSearchPattern(); // find if this is a trivial search expression (like 'Vms:' etc). isSafe = SearchObjects.isSafeExpression(searchText); getQueryReturnValue().setIsSearchValid(true); if (useCache) { // first lets check the cache of queries. searchKey = String.format("%1$s,%2$s", searchText, getParameters().getMaxCount()); data = mQueriesCache.get(searchKey); isExistsValue = (data != null); if (isExistsValue) { TimeSpan span = DateTime.getNow().Subtract(data.getDate()); if (span.Days >= 1) { IsFromYesterday = true; } } } // query not in cache or the cached entry is too old, process the // search text. if (!isExistsValue || IsFromYesterday) { log.debugFormat("ResourceManager::searchBusinessObjects(''{0}'') - entered", searchText); data = new QueryData2(); data.setPreQueryCommand(getDbFacade().getDbEngineDialect().getPreSearchQueryCommand()); ISyntaxChecker curSyntaxChecker; String[] splitted = searchText.split("[:@ ]"); if ((StringHelper.EqOp(splitted[0].toUpperCase(), SearchObjects.AD_USER_OBJ_NAME)) || (StringHelper.EqOp(splitted[0].toUpperCase(), SearchObjects.AD_USER_PLU_OBJ_NAME)) || (StringHelper.EqOp(splitted[0].toUpperCase(), SearchObjects.AD_GROUP_OBJ_NAME)) || (StringHelper.EqOp(splitted[0].toUpperCase(), SearchObjects.AD_GROUP_PLU_OBJ_NAME))) { if (searchText.indexOf('@') > 0 && splitted.length > 1) { data.setDomain(splitted[1]); searchText = searchText.substring(0, searchText.indexOf('@')) + searchText.substring(searchText.indexOf(':')); } else { String domain = getDefaultDomain(); data.setDomain(domain); } curSyntaxChecker = SyntaxCheckerFactory.CreateADSyntaxChecker(Config .<String> GetValue(ConfigValues.AuthenticationMethod)); } else { curSyntaxChecker = SyntaxCheckerFactory .CreateBackendSyntaxChecker(Config.<String> 
GetValue(ConfigValues.AuthenticationMethod)); } SyntaxContainer searchObj = curSyntaxChecker.analyzeSyntaxState(searchText, true); // set the case-sensitive flag searchObj.setCaseSensitive(getParameters().getCaseSensitive()); searchObj.setMaxCount(getParameters().getMaxCount() == -1 ? Config .<Integer> GetValue(ConfigValues.SearchResultsLimit) : getParameters().getMaxCount()); // setting FromSearch value searchObj.setSearchFrom(getParameters().getSearchFrom()); if (searchObj.getError() != SyntaxError.NO_ERROR) { getQueryReturnValue().setIsSearchValid(false); log.warnFormat("ResourceManager::searchBusinessObjects - erroneous search text - ''{0}''", searchText); int startPos = searchObj.getErrorStartPos(); int endPos = searchObj.getErrorEndPos(); int length = endPos - startPos; String error = (length > 0 && ((startPos + 1 + length) < searchText.length()) && (endPos + 1 < searchText.length())) ? searchText.substring(0, startPos) + "$" + searchText.substring(startPos + 1, startPos + 1 + length) + "$" + searchText.substring(endPos + 1) : searchObj.getError().toString(); getQueryReturnValue().setExceptionString(error); return null; } if (searchObj.getvalid() != true) { getQueryReturnValue().setIsSearchValid(false); log.warnFormat("ResourceManager::searchBusinessObjects - Invalid search text - ''{0}''", searchText); return null; } // An expression is considered safe if matches a trivial search. data.setQType(searchObj.getSearchObjectStr()); data.setQuery(curSyntaxChecker.generateQueryFromSyntaxContainer(searchObj, isSafe)); data.setDate(new Date()); // when looking for tags , the query contains all parent children tag id's // statically, therefore , in order to reflect changes in the parent tree // we should not rely on the cached query in such case and have to build the // query from scratch. 
if (!containsStaticInValues(data.getQuery())) mQueriesCache.put(searchKey, data); } } catch (SearchEngineIllegalCharacterException e) { log.error("Search expression can not end with ESCAPE character:" + getParameters().getSearchPattern()); data = null; getQueryReturnValue().setIsSearchValid(false); } catch (SqlInjectionException e) { log.error("Sql Injection in search: " + getParameters().getSearchPattern()); data = null; getQueryReturnValue().setIsSearchValid(false); } catch (RuntimeException ex) { log.warn("Illegal search: " + getParameters().getSearchPattern(), ex); data = null; getQueryReturnValue().setIsSearchValid(false); } return data; }
private QueryData2 InitQueryData(boolean useCache) { QueryData2 data = null; boolean isExistsValue = false; boolean IsFromYesterday = false; boolean isSafe = false; String searchKey = ""; try { String searchText = getParameters().getSearchPattern(); // find if this is a trivial search expression (like 'Vms:' etc). isSafe = SearchObjects.isSafeExpression(searchText); getQueryReturnValue().setIsSearchValid(true); if (useCache) { // first lets check the cache of queries. searchKey = String.format("%1$s,%2$s,%3$s", searchText, getParameters().getMaxCount(),getParameters().getCaseSensitive()); data = mQueriesCache.get(searchKey); isExistsValue = (data != null); if (isExistsValue) { TimeSpan span = DateTime.getNow().Subtract(data.getDate()); if (span.Days >= 1) { IsFromYesterday = true; } } } // query not in cache or the cached entry is too old, process the // search text. if (!isExistsValue || IsFromYesterday) { log.debugFormat("ResourceManager::searchBusinessObjects(''{0}'') - entered", searchText); data = new QueryData2(); data.setPreQueryCommand(getDbFacade().getDbEngineDialect().getPreSearchQueryCommand()); ISyntaxChecker curSyntaxChecker; String[] splitted = searchText.split("[:@ ]"); if ((StringHelper.EqOp(splitted[0].toUpperCase(), SearchObjects.AD_USER_OBJ_NAME)) || (StringHelper.EqOp(splitted[0].toUpperCase(), SearchObjects.AD_USER_PLU_OBJ_NAME)) || (StringHelper.EqOp(splitted[0].toUpperCase(), SearchObjects.AD_GROUP_OBJ_NAME)) || (StringHelper.EqOp(splitted[0].toUpperCase(), SearchObjects.AD_GROUP_PLU_OBJ_NAME))) { if (searchText.indexOf('@') > 0 && splitted.length > 1) { data.setDomain(splitted[1]); searchText = searchText.substring(0, searchText.indexOf('@')) + searchText.substring(searchText.indexOf(':')); } else { String domain = getDefaultDomain(); data.setDomain(domain); } curSyntaxChecker = SyntaxCheckerFactory.CreateADSyntaxChecker(Config .<String> GetValue(ConfigValues.AuthenticationMethod)); } else { curSyntaxChecker = SyntaxCheckerFactory 
.CreateBackendSyntaxChecker(Config.<String> GetValue(ConfigValues.AuthenticationMethod)); } SyntaxContainer searchObj = curSyntaxChecker.analyzeSyntaxState(searchText, true); // set the case-sensitive flag searchObj.setCaseSensitive(getParameters().getCaseSensitive()); searchObj.setMaxCount(getParameters().getMaxCount() == -1 ? Config .<Integer> GetValue(ConfigValues.SearchResultsLimit) : getParameters().getMaxCount()); // setting FromSearch value searchObj.setSearchFrom(getParameters().getSearchFrom()); if (searchObj.getError() != SyntaxError.NO_ERROR) { getQueryReturnValue().setIsSearchValid(false); log.warnFormat("ResourceManager::searchBusinessObjects - erroneous search text - ''{0}''", searchText); int startPos = searchObj.getErrorStartPos(); int endPos = searchObj.getErrorEndPos(); int length = endPos - startPos; String error = (length > 0 && ((startPos + 1 + length) < searchText.length()) && (endPos + 1 < searchText.length())) ? searchText.substring(0, startPos) + "$" + searchText.substring(startPos + 1, startPos + 1 + length) + "$" + searchText.substring(endPos + 1) : searchObj.getError().toString(); getQueryReturnValue().setExceptionString(error); return null; } if (searchObj.getvalid() != true) { getQueryReturnValue().setIsSearchValid(false); log.warnFormat("ResourceManager::searchBusinessObjects - Invalid search text - ''{0}''", searchText); return null; } // An expression is considered safe if matches a trivial search. data.setQType(searchObj.getSearchObjectStr()); data.setQuery(curSyntaxChecker.generateQueryFromSyntaxContainer(searchObj, isSafe)); data.setDate(new Date()); // when looking for tags , the query contains all parent children tag id's // statically, therefore , in order to reflect changes in the parent tree // we should not rely on the cached query in such case and have to build the // query from scratch. 
if (!containsStaticInValues(data.getQuery())) mQueriesCache.put(searchKey, data); } } catch (SearchEngineIllegalCharacterException e) { log.error("Search expression can not end with ESCAPE character:" + getParameters().getSearchPattern()); data = null; getQueryReturnValue().setIsSearchValid(false); } catch (SqlInjectionException e) { log.error("Sql Injection in search: " + getParameters().getSearchPattern()); data = null; getQueryReturnValue().setIsSearchValid(false); } catch (RuntimeException ex) { log.warn("Illegal search: " + getParameters().getSearchPattern(), ex); data = null; getQueryReturnValue().setIsSearchValid(false); } return data; }
diff --git a/src/main/java/com/pahimar/ee3/handler/FuelHandler.java b/src/main/java/com/pahimar/ee3/handler/FuelHandler.java index cef80243..10d6c306 100644 --- a/src/main/java/com/pahimar/ee3/handler/FuelHandler.java +++ b/src/main/java/com/pahimar/ee3/handler/FuelHandler.java @@ -1,59 +1,59 @@ package com.pahimar.ee3.handler; import com.pahimar.ee3.block.ModBlocks; import com.pahimar.ee3.item.ModItems; import cpw.mods.fml.common.IFuelHandler; import net.minecraft.item.Item; import net.minecraft.item.ItemStack; import net.minecraft.tileentity.TileEntityFurnace; public class FuelHandler implements IFuelHandler { private static final ItemStack ALCHEMICAL_COAL_STACK = new ItemStack(ModItems.alchemicalFuel, 1, 0); private static final ItemStack MOBIUS_FUEL_STACK = new ItemStack(ModItems.alchemicalFuel, 1, 1); private static final ItemStack AETERNALIS_FUEL_STACK = new ItemStack(ModItems.alchemicalFuel, 1, 2); private static final ItemStack ALCHEMICAL_COAL_BLOCK_STACK = new ItemStack(ModBlocks.alchemicalFuel, 1, 0); private static final ItemStack MOBIUS_FUEL_BLOCK_STACK = new ItemStack(ModBlocks.alchemicalFuel, 1, 1); private static final ItemStack AETERNALIS_FUEL_BLOCK_STACK = new ItemStack(ModBlocks.alchemicalFuel, 1, 2); @Override public int getBurnTime(ItemStack fuel) { /** * Alchemical Coal */ if (fuel.itemID == ALCHEMICAL_COAL_STACK.itemID && fuel.getItemDamage() == ALCHEMICAL_COAL_STACK.getItemDamage()) { return 8 * TileEntityFurnace.getItemBurnTime(new ItemStack(Item.coal)); } else if (fuel.itemID == ALCHEMICAL_COAL_BLOCK_STACK.itemID && fuel.getItemDamage() == ALCHEMICAL_COAL_BLOCK_STACK.getItemDamage()) { return 9 * getBurnTime(ALCHEMICAL_COAL_STACK); } /** * Mobius Fuel */ else if (fuel.itemID == MOBIUS_FUEL_STACK.itemID && fuel.getItemDamage() == MOBIUS_FUEL_STACK.getItemDamage()) { - return 8 * getBurnTime(new ItemStack(ModItems.alchemicalFuelBlock.itemID, 1, 0)); + return 8 * getBurnTime(ALCHEMICAL_COAL_BLOCK_STACK); } else if (fuel.itemID == 
MOBIUS_FUEL_BLOCK_STACK.itemID && fuel.getItemDamage() == MOBIUS_FUEL_BLOCK_STACK.getItemDamage()) { return 9 * getBurnTime(MOBIUS_FUEL_STACK); } /** * Aeternalis Fuel */ else if (fuel.itemID == AETERNALIS_FUEL_STACK.itemID && fuel.getItemDamage() == AETERNALIS_FUEL_STACK.getItemDamage()) { return 8 * getBurnTime(new ItemStack(ModItems.alchemicalFuelBlock.itemID, 1, 1)); } else if (fuel.itemID == AETERNALIS_FUEL_BLOCK_STACK.itemID && fuel.getItemDamage() == AETERNALIS_FUEL_BLOCK_STACK.getItemDamage()) { return 9 * getBurnTime(AETERNALIS_FUEL_STACK); } return 0; } }
true
true
public int getBurnTime(ItemStack fuel) { /** * Alchemical Coal */ if (fuel.itemID == ALCHEMICAL_COAL_STACK.itemID && fuel.getItemDamage() == ALCHEMICAL_COAL_STACK.getItemDamage()) { return 8 * TileEntityFurnace.getItemBurnTime(new ItemStack(Item.coal)); } else if (fuel.itemID == ALCHEMICAL_COAL_BLOCK_STACK.itemID && fuel.getItemDamage() == ALCHEMICAL_COAL_BLOCK_STACK.getItemDamage()) { return 9 * getBurnTime(ALCHEMICAL_COAL_STACK); } /** * Mobius Fuel */ else if (fuel.itemID == MOBIUS_FUEL_STACK.itemID && fuel.getItemDamage() == MOBIUS_FUEL_STACK.getItemDamage()) { return 8 * getBurnTime(new ItemStack(ModItems.alchemicalFuelBlock.itemID, 1, 0)); } else if (fuel.itemID == MOBIUS_FUEL_BLOCK_STACK.itemID && fuel.getItemDamage() == MOBIUS_FUEL_BLOCK_STACK.getItemDamage()) { return 9 * getBurnTime(MOBIUS_FUEL_STACK); } /** * Aeternalis Fuel */ else if (fuel.itemID == AETERNALIS_FUEL_STACK.itemID && fuel.getItemDamage() == AETERNALIS_FUEL_STACK.getItemDamage()) { return 8 * getBurnTime(new ItemStack(ModItems.alchemicalFuelBlock.itemID, 1, 1)); } else if (fuel.itemID == AETERNALIS_FUEL_BLOCK_STACK.itemID && fuel.getItemDamage() == AETERNALIS_FUEL_BLOCK_STACK.getItemDamage()) { return 9 * getBurnTime(AETERNALIS_FUEL_STACK); } return 0; }
public int getBurnTime(ItemStack fuel) { /** * Alchemical Coal */ if (fuel.itemID == ALCHEMICAL_COAL_STACK.itemID && fuel.getItemDamage() == ALCHEMICAL_COAL_STACK.getItemDamage()) { return 8 * TileEntityFurnace.getItemBurnTime(new ItemStack(Item.coal)); } else if (fuel.itemID == ALCHEMICAL_COAL_BLOCK_STACK.itemID && fuel.getItemDamage() == ALCHEMICAL_COAL_BLOCK_STACK.getItemDamage()) { return 9 * getBurnTime(ALCHEMICAL_COAL_STACK); } /** * Mobius Fuel */ else if (fuel.itemID == MOBIUS_FUEL_STACK.itemID && fuel.getItemDamage() == MOBIUS_FUEL_STACK.getItemDamage()) { return 8 * getBurnTime(ALCHEMICAL_COAL_BLOCK_STACK); } else if (fuel.itemID == MOBIUS_FUEL_BLOCK_STACK.itemID && fuel.getItemDamage() == MOBIUS_FUEL_BLOCK_STACK.getItemDamage()) { return 9 * getBurnTime(MOBIUS_FUEL_STACK); } /** * Aeternalis Fuel */ else if (fuel.itemID == AETERNALIS_FUEL_STACK.itemID && fuel.getItemDamage() == AETERNALIS_FUEL_STACK.getItemDamage()) { return 8 * getBurnTime(new ItemStack(ModItems.alchemicalFuelBlock.itemID, 1, 1)); } else if (fuel.itemID == AETERNALIS_FUEL_BLOCK_STACK.itemID && fuel.getItemDamage() == AETERNALIS_FUEL_BLOCK_STACK.getItemDamage()) { return 9 * getBurnTime(AETERNALIS_FUEL_STACK); } return 0; }
diff --git a/core/src/test/java/com/datascience/gal/IncrementalDawidSkeneTest.java b/core/src/test/java/com/datascience/gal/IncrementalDawidSkeneTest.java index 4cc093e4..ec8aac32 100644 --- a/core/src/test/java/com/datascience/gal/IncrementalDawidSkeneTest.java +++ b/core/src/test/java/com/datascience/gal/IncrementalDawidSkeneTest.java @@ -1,102 +1,102 @@ /** * */ package com.datascience.gal; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.util.ArrayList; import java.util.HashSet; import java.util.Map; import java.util.Map.Entry; import com.datascience.core.base.LObject; import com.datascience.core.nominal.NominalProject; import com.datascience.core.base.Worker; import com.datascience.datastoring.datamodels.full.MemoryJobStorage; import org.junit.After; import org.junit.Before; import org.junit.Test; import com.datascience.utils.auxl.TestDataManager; public class IncrementalDawidSkeneTest { NominalProject project; ArrayList<String> categories; @Before public void setUp(){ categories = new ArrayList<String>(); categories.add("category1"); categories.add("category2"); IncrementalDawidSkene alg = new IncrementalDawidSkene(); MemoryJobStorage js = new MemoryJobStorage(); project = new NominalProject(alg, js.getNominalData("testId"), js.getNominalResults("testId", categories)); project.getData().addNewUpdatableAlgorithm(alg); project.initializeCategories(categories, null, null); } @After public void tearDown() throws Exception { } @Test public final void testInitializePriors() { double actual = project.getAlgorithm().prior(categories.get(0)); double expected = 1. 
/ project.getData().getCategories().size(); assertEquals(expected, actual, TestDataManager.DELTA_DOUBLE); } @Test public final void testGetErrorRateForWorker() { Worker w = new Worker("worker1"); project.getData().addWorker(w); double errorRate = ((AbstractDawidSkene)project.getAlgorithm()).getErrorRateForWorker( w, categories.get(0), categories.get(1)); assertTrue(errorRate>=0 && errorRate<=1); } @Test public final void testGetObjectClassProbabilites(){ LObject<String> obj = new LObject<String>("object"); project.getData().addObject(obj); for (Double val : ((AbstractDawidSkene)project.getAlgorithm()).getObjectClassProbabilities(obj).values()){ - assertEquals(0.0, val, TestDataManager.DELTA_DOUBLE); + assertEquals(0.5, val, TestDataManager.DELTA_DOUBLE); } LObject<String> gold = new LObject<String>("gold_object"); gold.setGoldLabel("category1"); project.getData().addObject(gold); Map<String, Double> cp = ((AbstractDawidSkene)project.getAlgorithm()).getObjectClassProbabilities(gold); assertEquals(1., cp.get("category1"), TestDataManager.DELTA_DOUBLE); assertEquals(0., cp.get("category2"), TestDataManager.DELTA_DOUBLE); } private boolean compareHashSets(HashSet s1, HashSet s2) { if(s1== null && s2==null) return true; if(s1 != null && s2 != null) return s1.containsAll(s2) && s2.containsAll(s1); else return false; } private <T1, T2> boolean compareHashMaps(Map<T1, T2> m1, Map<T1, T2> m2) { if(m1== null && m2==null) return true; if(m1 != null && m2 != null) { for (Entry<T1, T2> entry : m1.entrySet()) { T1 key = entry.getKey(); if (!entry.getValue().equals(m2.get(key))) return false; } return true; } else return false; } }
true
true
public final void testGetObjectClassProbabilites(){ LObject<String> obj = new LObject<String>("object"); project.getData().addObject(obj); for (Double val : ((AbstractDawidSkene)project.getAlgorithm()).getObjectClassProbabilities(obj).values()){ assertEquals(0.0, val, TestDataManager.DELTA_DOUBLE); } LObject<String> gold = new LObject<String>("gold_object"); gold.setGoldLabel("category1"); project.getData().addObject(gold); Map<String, Double> cp = ((AbstractDawidSkene)project.getAlgorithm()).getObjectClassProbabilities(gold); assertEquals(1., cp.get("category1"), TestDataManager.DELTA_DOUBLE); assertEquals(0., cp.get("category2"), TestDataManager.DELTA_DOUBLE); }
public final void testGetObjectClassProbabilites(){ LObject<String> obj = new LObject<String>("object"); project.getData().addObject(obj); for (Double val : ((AbstractDawidSkene)project.getAlgorithm()).getObjectClassProbabilities(obj).values()){ assertEquals(0.5, val, TestDataManager.DELTA_DOUBLE); } LObject<String> gold = new LObject<String>("gold_object"); gold.setGoldLabel("category1"); project.getData().addObject(gold); Map<String, Double> cp = ((AbstractDawidSkene)project.getAlgorithm()).getObjectClassProbabilities(gold); assertEquals(1., cp.get("category1"), TestDataManager.DELTA_DOUBLE); assertEquals(0., cp.get("category2"), TestDataManager.DELTA_DOUBLE); }
diff --git a/test/web/org/openmrs/web/controller/user/UserFormControllerTest.java b/test/web/org/openmrs/web/controller/user/UserFormControllerTest.java index 52f4f54c..bef5287b 100644 --- a/test/web/org/openmrs/web/controller/user/UserFormControllerTest.java +++ b/test/web/org/openmrs/web/controller/user/UserFormControllerTest.java @@ -1,48 +1,48 @@ /** * The contents of this file are subject to the OpenMRS Public License * Version 1.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * http://license.openmrs.org * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the * License for the specific language governing rights and limitations * under the License. * * Copyright (C) OpenMRS, LLC. All Rights Reserved. */ package org.openmrs.web.controller.user; import org.junit.Test; import org.openmrs.PersonName; import org.openmrs.User; import org.openmrs.test.Verifies; import org.openmrs.web.test.BaseWebContextSensitiveTest; import org.springframework.mock.web.MockHttpServletRequest; import org.springframework.mock.web.MockHttpSession; import org.springframework.ui.ModelMap; import org.springframework.validation.BindException; import org.springframework.web.context.request.ServletWebRequest; import org.springframework.web.context.request.WebRequest; /** * Tests the {@link oldUserFormController} class. 
*/ public class UserFormControllerTest extends BaseWebContextSensitiveTest { /** * @see {@link UserFormController#handleSubmission(WebRequest,HttpSession,String,String,String,null,User,BindingResult)} * */ @Test @Verifies(value = "should work for an example", method = "handleSubmission(WebRequest,HttpSession,String,String,String,null,User,BindingResult)") public void handleSubmission_shouldWorkForAnExample() throws Exception { UserFormController controller = new UserFormController(); WebRequest request = new ServletWebRequest(new MockHttpServletRequest()); User user = controller.formBackingObject(request, null); user.addName(new PersonName("This", "is", "Test")); user.getPerson().setGender("F"); - controller.handleSubmission(request, new MockHttpSession(), new ModelMap(), "Save User", "pass123", "pass123", new String[0], user, new BindException(user, "user")); + controller.handleSubmission(request, new MockHttpSession(), new ModelMap(), "Save User", "pass123", "pass123", new String[0], "true", user, new BindException(user, "user")); } }
true
true
public void handleSubmission_shouldWorkForAnExample() throws Exception { UserFormController controller = new UserFormController(); WebRequest request = new ServletWebRequest(new MockHttpServletRequest()); User user = controller.formBackingObject(request, null); user.addName(new PersonName("This", "is", "Test")); user.getPerson().setGender("F"); controller.handleSubmission(request, new MockHttpSession(), new ModelMap(), "Save User", "pass123", "pass123", new String[0], user, new BindException(user, "user")); }
public void handleSubmission_shouldWorkForAnExample() throws Exception { UserFormController controller = new UserFormController(); WebRequest request = new ServletWebRequest(new MockHttpServletRequest()); User user = controller.formBackingObject(request, null); user.addName(new PersonName("This", "is", "Test")); user.getPerson().setGender("F"); controller.handleSubmission(request, new MockHttpSession(), new ModelMap(), "Save User", "pass123", "pass123", new String[0], "true", user, new BindException(user, "user")); }
diff --git a/src/nz/govt/canterburymaps/CanterburyMaps.java b/src/nz/govt/canterburymaps/CanterburyMaps.java index e698466..302ff57 100644 --- a/src/nz/govt/canterburymaps/CanterburyMaps.java +++ b/src/nz/govt/canterburymaps/CanterburyMaps.java @@ -1,39 +1,39 @@ /* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package nz.govt.canterburymaps; import android.os.Bundle; import org.apache.cordova.*; public class CanterburyMaps extends DroidGap { @Override public void onCreate(Bundle savedInstanceState) { super.setIntegerProperty("splashscreen", R.drawable.splash); super.onCreate(savedInstanceState); // Set by <content src="index.html" /> in config.xml - super.loadUrl(Config.getStartUrl() , 100000); + super.loadUrl(Config.getStartUrl(), 100000); // Large time as we manually hide splash when ready //super.loadUrl("file:///android_asset/www/index.html") } }
true
true
public void onCreate(Bundle savedInstanceState) { super.setIntegerProperty("splashscreen", R.drawable.splash); super.onCreate(savedInstanceState); // Set by <content src="index.html" /> in config.xml super.loadUrl(Config.getStartUrl() , 100000); //super.loadUrl("file:///android_asset/www/index.html") }
public void onCreate(Bundle savedInstanceState) { super.setIntegerProperty("splashscreen", R.drawable.splash); super.onCreate(savedInstanceState); // Set by <content src="index.html" /> in config.xml super.loadUrl(Config.getStartUrl(), 100000); // Large time as we manually hide splash when ready //super.loadUrl("file:///android_asset/www/index.html") }
diff --git a/src/main/java/exceptions/ExpiredException.java b/src/main/java/exceptions/ExpiredException.java index a0a43a3..f1339b9 100644 --- a/src/main/java/exceptions/ExpiredException.java +++ b/src/main/java/exceptions/ExpiredException.java @@ -1,53 +1,53 @@ package exceptions; import degraphmalizr.ID; import scala.actors.threadpool.Arrays; import java.util.Iterator; /** * Nodes were found in the graph with a version that cannot be found in Elasticsearch. * */ public class ExpiredException extends DegraphmalizerException { final Iterable<ID> expired; public ExpiredException(Iterable<ID> expired) { super(exceptionMessage(expired)); this.expired = expired; } public ExpiredException(ID... expired) { this(Arrays.asList(expired)); } private static String exceptionMessage(Iterable<ID> expired) { - final StringBuilder sb = new StringBuilder("Query expired for ids: "); + final StringBuilder sb = new StringBuilder("Version mismatch between graph and ES for id(s): "); // id1; id2; id3 ... final Iterator<ID> ids = expired.iterator(); while(ids.hasNext()) { final ID id = ids.next(); sb.append(id.toString()); if(ids.hasNext()) sb.append("; "); } return sb.toString(); } /** * Get a list of expired ID's. * * This means that the ID in the graph has a version that can not be found in elasticsearch. */ public Iterable<ID> expired() { return expired; } }
true
true
private static String exceptionMessage(Iterable<ID> expired) { final StringBuilder sb = new StringBuilder("Query expired for ids: "); // id1; id2; id3 ... final Iterator<ID> ids = expired.iterator(); while(ids.hasNext()) { final ID id = ids.next(); sb.append(id.toString()); if(ids.hasNext()) sb.append("; "); } return sb.toString(); }
private static String exceptionMessage(Iterable<ID> expired) { final StringBuilder sb = new StringBuilder("Version mismatch between graph and ES for id(s): "); // id1; id2; id3 ... final Iterator<ID> ids = expired.iterator(); while(ids.hasNext()) { final ID id = ids.next(); sb.append(id.toString()); if(ids.hasNext()) sb.append("; "); } return sb.toString(); }
diff --git a/src/servlets/ResetAnswersServlet.java b/src/servlets/ResetAnswersServlet.java index 49d7622..ec0f795 100644 --- a/src/servlets/ResetAnswersServlet.java +++ b/src/servlets/ResetAnswersServlet.java @@ -1,24 +1,24 @@ package servlets; import db.DB; import db.RealDB; import model.User; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; //Created By Ilan Godik public class ResetAnswersServlet extends HttpServlet { protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { User user = (User) req.getSession().getAttribute("user"); if (user != null) { DB db = new RealDB(); - db.update("delete * from userAnswers where userID=" + user.id + ";"); + db.update("delete from userAnswers where userID=" + user.id + ";"); db.update("update users set lastQuestion=1 where id=" + user.id + ";"); } resp.sendRedirect("/"); } }
true
true
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { User user = (User) req.getSession().getAttribute("user"); if (user != null) { DB db = new RealDB(); db.update("delete * from userAnswers where userID=" + user.id + ";"); db.update("update users set lastQuestion=1 where id=" + user.id + ";"); } resp.sendRedirect("/"); }
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { User user = (User) req.getSession().getAttribute("user"); if (user != null) { DB db = new RealDB(); db.update("delete from userAnswers where userID=" + user.id + ";"); db.update("update users set lastQuestion=1 where id=" + user.id + ";"); } resp.sendRedirect("/"); }
diff --git a/src/edu/sc/seis/sod/database/AbstractWaveformDatabase.java b/src/edu/sc/seis/sod/database/AbstractWaveformDatabase.java index 46fee8dc3..e16686b0d 100644 --- a/src/edu/sc/seis/sod/database/AbstractWaveformDatabase.java +++ b/src/edu/sc/seis/sod/database/AbstractWaveformDatabase.java @@ -1,939 +1,939 @@ package edu.sc.seis.sod.database; import edu.iris.Fissures.model.*; import java.sql.*; import java.util.*; import org.apache.log4j.*; /** * AbstractWaveformDatabase.java * * * Created: Fri Oct 11 14:40:49 2002 * * @author <a href="mailto:">Srinivasa Telukutla</a> * @version */ public abstract class AbstractWaveformDatabase implements WaveformDatabase{ public AbstractWaveformDatabase (Connection connection){ this.connection = connection; init(); } private void init() { try { create(); getStmt = connection.prepareStatement(" SELECT waveformeventid, waveformchannelid "+ " FROM waveformchanneldb WHERE waveformid = ? "); getIdStmt = connection.prepareStatement(" SELECT waveformid FROM "+ " waveformchanneldb "+ " WHERE waveformeventid = ? AND "+ " waveformchannelid =? "); getByStatusStmt = connection.prepareStatement(" SELECT waveformid FROM waveformchanneldb "+ - " WHERE status = ? "); + " WHERE status = ? ORDER BY waveformeventid"); updateStatusStmt = connection.prepareStatement(" UPDATE waveformchanneldb "+ " SET status = ? , "+ " reason = ? "+ " WHERE waveformid = ? "); statusUpdateStmt = connection.prepareStatement(" UPDATE waveformchanneldb set status = ? "+ " WHERE status = ? "); deleteByEventIdStmt = connection.prepareStatement(" DELETE FROM waveformchanneldb "+ " WHERE waveformeventid = ? "); putInfoStmt = connection.prepareStatement(" INSERT INTO waveformdb "+ " VALUES(?,?)"); putNetInfoStmt = connection.prepareStatement(" INSERT INTO waveformnetworkdb "+ " VALUES(?, ?, ?, ?) 
"); putStationInfoStmt = connection.prepareStatement(" INSERT INTO waveformstationdb "+ " VALUES(?, ?, ?, ?, ?)"); putSiteInfoStmt = connection.prepareStatement(" INSERT INTO waveformsitedb "+ " VALUES(?, ?, ?, ?, ?)"); putChannelInfoStmt = connection.prepareStatement(" INSERT INTO waveformchanneldb "+ " (waveformeventid, waveformchannelid, waveformsiteid, "+ " qtime, status, numretrys, reason ) "+ " VALUES(?, ?, ?, ?, ?, ?, ?)"); networkCountStmt = connection.prepareStatement(" UPDATE waveformdb "+ " SET numnetworks = numnetworks + ? "+ " WHERE waveformeventid = ? "); stationCountStmt = connection.prepareStatement(" UPDATE waveformnetworkdb "+ " SET numstations = numstations + ? "+ " WHERE waveformeventid = ? AND "+ " waveformnetworkid = ? "); siteCountStmt = connection.prepareStatement(" UPDATE waveformstationdb "+ " SET numsites = numsites + ? "+ " WHERE waveformeventid = ? AND "+ " waveformstationid = ? "); channelCountStmt = connection.prepareStatement(" UPDATE waveformsitedb "+ " SET numchannels = numchannels + ? "+ " WHERE waveformeventid = ? AND "+ " waveformsiteid = ? "); getNetCountStmt = connection.prepareStatement(" SELECT numnetworks from waveformdb "+ " WHERE waveformeventid = ? "); getStationCountStmt = connection.prepareStatement(" SELECT numstations from "+ " waveformnetworkdb " + " WHERE waveformeventid = ? AND "+ " waveformnetworkid = ? "); getSiteCountStmt = connection.prepareStatement(" SELECT numsites from "+ " waveformstationdb "+ " WHERE waveformeventid = ? AND "+ " waveformstationid = ? "); getChannelCountStmt = connection.prepareStatement(" SELECT numchannels from "+ " waveformsitedb "+ " WHERE waveformeventid = ? AND "+ " waveformsiteid = ? "); delInfoStmt = connection.prepareStatement(" DELETE FROM waveformdb "+ " WHERE waveformeventid = ? "); delNetworkInfoStmt = connection.prepareStatement(" DELETE FROM waveformnetworkdb "+ " WHERE waveformeventid = ? AND "+ " waveformnetworkid = ? 
"); delStationInfoStmt = connection.prepareStatement( " DELETE FROM waveformstationdb "+ " WHERE waveformeventid = ? AND "+ " waveformstationid = ? "); delSiteInfoStmt = connection.prepareStatement(" DELETE FROM waveformsitedb "+ " WHERE waveformeventid = ? AND "+ " waveformsiteid = ? "); delChannelInfoStmt = connection.prepareStatement(" DELETE FROM waveformchanneldb "+ " WHERE waveformeventid = ? AND "+ " waveformchannelid = ? "); isInfoIns = connection.prepareStatement(" SELECT * from waveformdb "+ " WHERE waveformeventid = ? "); isNetworkInfoIns = connection.prepareStatement(" SELECT * from waveformnetworkdb "+ " WHERE waveformeventid = ? AND "+ " waveformnetworkid = ? "); isStationInfoIns = connection.prepareStatement(" SELECT * from waveformstationdb "+ " WHERE waveformeventid = ? AND "+ " waveformstationid = ? "); isSiteInfoIns = connection.prepareStatement(" SELECT * from waveformsitedb "+ " WHERE waveformeventid = ? AND "+ " waveformsiteid = ? " ); isChannelInfoIns = connection.prepareStatement(" SELECT * from waveformchanneldb "+ " WHERE waveformeventid = ? AND "+ " waveformchannelid = ? "); getIdsStmt = connection.prepareStatement(" SELECT waveformid from "+ " waveformchanneldb "+ " WHERE ( status = ? OR "+ " status = ? ) AND "+ " waveformeventid = ? "); unfinishedEventsStmt = connection.prepareStatement("SELECT waveformeventid FROM "+ " waveformdb WHERE "+ " numNetworks > ? "); unfinishedNetworkCountStmt = connection.prepareStatement("SELECT count(*) FROM "+ " waveformnetworkdb WHERE "+ " waveformeventid = ? AND "+ " waveformeventid = ? AND "+ " numStations > ? "); unfinishedStationCountStmt = connection.prepareStatement("SELECT count(*) FROM "+ " waveformstationdb WHERE "+ " waveformeventid = ? AND "+ " waveformnetworkid = ? AND "+ " numSites > ?"); unfinishedSiteCountStmt = connection.prepareStatement("SELECT count(*) FROM "+ " waveformsitedb WHERE "+ " waveformeventid = ? AND "+ " waveformstationid = ? AND "+ " numchannels > ? 
"); unfinishedChannelCountStmt = connection.prepareStatement("SELECT count(*) FROM "+ " waveformchanneldb WHERE "+ " waveformeventid = ? AND "+ " waveformsiteid = ? AND "+ " (status = 0 OR status = 1) "); deleteStmt = "DELETE FROM "; } catch(SQLException sqle) { sqle.printStackTrace(); } } public abstract void create(); public abstract void beginTransaction(); public abstract void endTransaction(); public int putInfo(int waveformeventid, int numNetworks) { try { if(isInfoInserted(waveformeventid)) return 0; putInfoStmt.setInt(1, waveformeventid); putInfoStmt.setInt(2, numNetworks); putInfoStmt.executeUpdate(); } catch(SQLException sqle) { sqle.printStackTrace(); } return 0; } private boolean isInfoInserted(int waveformeventid) { try { isInfoIns.setInt(1, waveformeventid); ResultSet rs = isInfoIns.executeQuery(); if(rs.next()) return true; } catch(SQLException sqle) { } return false; } public int putNetworkInfo(int waveformeventid, int networkid, int numstations, MicroSecondDate date) { try { if(isNetworkInfoInserted(waveformeventid, networkid)) { return 0; } putNetInfoStmt.setInt(1, waveformeventid); putNetInfoStmt.setInt(2, networkid); putNetInfoStmt.setInt(3, numstations); putNetInfoStmt.setTimestamp(4, date.getTimestamp()); putNetInfoStmt.executeUpdate(); } catch(SQLException sqle) { sqle.printStackTrace(); } return 0; } private boolean isNetworkInfoInserted(int waveformeventid, int networkid) { try { isNetworkInfoIns.setInt(1, waveformeventid); isNetworkInfoIns.setInt(2, networkid); ResultSet rs = isNetworkInfoIns.executeQuery(); if(rs.next()) return true; } catch(SQLException sqle) { sqle.printStackTrace(); } return false; } public int putStationInfo(int waveformeventid, int stationid, int networkid, int numsites, MicroSecondDate date) { try { if(isStationInfoInserted(waveformeventid, stationid)) return 0; insert(putStationInfoStmt, 1, waveformeventid, stationid, networkid, numsites, date); putStationInfoStmt.executeUpdate(); } catch(SQLException sqle) { 
sqle.printStackTrace(); } return 0; } private boolean isStationInfoInserted(int waveformeventid, int stationid ) { try { isStationInfoIns.setInt(1,waveformeventid); isStationInfoIns.setInt(2, stationid); ResultSet rs = isStationInfoIns.executeQuery(); if(rs.next()) return true; } catch(SQLException sqle) { sqle.printStackTrace(); } return false; } public int putSiteInfo(int waveformeventid, int siteid, int stationid, int numchannels, MicroSecondDate date) { try { if(isSiteInfoInserted(waveformeventid, siteid)) return 0; insert(putSiteInfoStmt, 1, waveformeventid, siteid, stationid, numchannels, date); putSiteInfoStmt.executeUpdate(); } catch(SQLException sqle) { sqle.printStackTrace(); } return 0; } private int insert(PreparedStatement stmt, int index, int waveformeventid, int refid, int refdbid, int numentries, MicroSecondDate date) { try { stmt.setInt(index++, waveformeventid); stmt.setInt(index++, refid); stmt.setInt(index++, refdbid); stmt.setInt(index++, numentries); stmt.setTimestamp(index++, date.getTimestamp()); } catch(SQLException sqle) { sqle.printStackTrace(); } return index; } public boolean isSiteInfoInserted(int waveformeventid, int siteid) { try { isSiteInfoIns.setInt(1, waveformeventid); isSiteInfoIns.setInt(2, siteid); ResultSet rs = isSiteInfoIns.executeQuery(); if(rs.next()) return true; } catch(SQLException sqle) { sqle.printStackTrace(); } return false; } public int putChannelInfo(int waveformeventid, int channelid, int siteid, MicroSecondDate date) { try { if(isChannelInfoInserted(waveformeventid, channelid)) return 0; putChannelInfoStmt.setInt(1, waveformeventid); putChannelInfoStmt.setInt(2, channelid); putChannelInfoStmt.setInt(3, siteid); putChannelInfoStmt.setTimestamp(4, date.getTimestamp()); putChannelInfoStmt.setInt(5, Status.NEW.getId()); putChannelInfoStmt.setInt(6, 0); putChannelInfoStmt.setString(7, ""); putChannelInfoStmt.executeUpdate(); } catch(SQLException sqle) { sqle.printStackTrace(); } return 0; } private boolean 
isChannelInfoInserted(int waveformeventid, int channelid) { try { isChannelInfoIns.setInt(1, waveformeventid); isChannelInfoIns.setInt(2, channelid); ResultSet rs = isChannelInfoIns.executeQuery(); if(rs.next()) return true; } catch(SQLException sqle) { sqle.printStackTrace(); } return false; } public void decrementNetworkCount(int waveformeventid) { try { networkCountStmt.setInt(1, -1); networkCountStmt.setInt(2, waveformeventid); networkCountStmt.executeUpdate(); } catch(SQLException sqle) { sqle.printStackTrace(); } } public void decrementStationCount(int waveformeventid, int networkid) { try { insertForIncrDecr(stationCountStmt, 1, -1, waveformeventid, networkid); stationCountStmt.executeUpdate(); } catch(SQLException sqle) { sqle.printStackTrace(); } } public void decrementSiteCount(int waveformeventid, int stationid) { try { insertForIncrDecr(siteCountStmt, 1, -1, waveformeventid, stationid); siteCountStmt.executeUpdate(); } catch(SQLException sqle) { sqle.printStackTrace(); } } public void decrementChannelCount(int waveformeventid, int siteid) { try { // synchronized(connection) { insertForIncrDecr(channelCountStmt, 1, -1, waveformeventid, siteid); int count = getChannelCount(waveformeventid, siteid); channelCountStmt.executeUpdate(); //} } catch(SQLException sqle) { sqle.printStackTrace(); } } public void incrementNetworkCount(int waveformeventid) { try { networkCountStmt.setInt(1, 1); networkCountStmt.setInt(2, waveformeventid); networkCountStmt.executeUpdate(); } catch(SQLException sqle) { sqle.printStackTrace(); } } public void incrementStationCount(int waveformeventid, int networkid) { try { insertForIncrDecr(stationCountStmt, 1, 1, waveformeventid, networkid); stationCountStmt.executeUpdate(); } catch(SQLException sqle) { sqle.printStackTrace(); } } public void incrementSiteCount(int waveformeventid, int stationid) { try { insertForIncrDecr(siteCountStmt, 1, 1, waveformeventid, stationid); siteCountStmt.executeUpdate(); } catch(SQLException sqle) { 
sqle.printStackTrace(); } } public void incrementChannelCount(int waveformeventid, int siteid) { try { insertForIncrDecr(channelCountStmt, 1, 1, waveformeventid, siteid); channelCountStmt.executeUpdate(); } catch(SQLException sqle) { sqle.printStackTrace(); } } private int insertForIncrDecr( PreparedStatement stmt, int index, int incrvalue, int eventid, int paramid) { try { stmt.setInt(index++, incrvalue); stmt.setInt(index++, eventid); stmt.setInt(index++, paramid); } catch(SQLException sqle) { sqle.printStackTrace(); } return index; } public int getNetworkCount(int waveformeventid) { try { getNetCountStmt.setInt(1, waveformeventid); ResultSet rs = getNetCountStmt.executeQuery(); if(rs.next()) return rs.getInt(1); } catch(SQLException sqle) { sqle.printStackTrace(); } return -1; } public int getStationCount(int waveformeventid, int networkid) { try { getStationCountStmt.setInt(1, waveformeventid); getStationCountStmt.setInt(2, networkid); ResultSet rs = getStationCountStmt.executeQuery(); if(rs.next()) return rs.getInt(1); } catch(SQLException sqle) { sqle.printStackTrace(); } return -1; } public int getSiteCount(int waveformeventid, int stationid) { try { getSiteCountStmt.setInt(1, waveformeventid); getSiteCountStmt.setInt(2, stationid); ResultSet rs = getSiteCountStmt.executeQuery(); if(rs.next()) return rs.getInt(1); } catch(SQLException sqle) { sqle.printStackTrace(); } return -1; } public int getChannelCount(int waveformeventid, int siteid) { try { getChannelCountStmt.setInt(1, waveformeventid); getChannelCountStmt.setInt(2, siteid); ResultSet rs = getChannelCountStmt.executeQuery(); if(rs.next()) return rs.getInt(1); } catch(SQLException sqle) { sqle.printStackTrace(); } return -1; } public int unfinishedNetworkCount(int waveformeventid){ try { insertForUnfinishedCount(unfinishedNetworkCountStmt, 1, waveformeventid, waveformeventid, 0); ResultSet rs = unfinishedNetworkCountStmt.executeQuery(); if(rs.next()) { return rs.getInt(1); } } catch(SQLException sqle) 
{ sqle.printStackTrace(); } return 0; } public int unfinishedStationCount(int waveformeventid, int networkid){ try { insertForUnfinishedCount(unfinishedStationCountStmt, 1, waveformeventid, networkid, 0); ResultSet rs = unfinishedStationCountStmt.executeQuery(); if(rs.next()) { return rs.getInt(1); } } catch(SQLException sqle) { sqle.printStackTrace(); } return 0; } public int unfinishedSiteCount(int waveformeventid, int stationid){ try { insertForUnfinishedCount(unfinishedSiteCountStmt, 1, waveformeventid, stationid, 0); ResultSet rs = unfinishedSiteCountStmt.executeQuery(); if(rs.next()) { return rs.getInt(1); } } catch(SQLException sqle) { sqle.printStackTrace(); } return 0; } public int unfinishedChannelCount(int waveformeventid, int siteid){ try { unfinishedChannelCountStmt.setInt(1, waveformeventid); unfinishedChannelCountStmt.setInt(2, siteid); ResultSet rs = unfinishedChannelCountStmt.executeQuery(); if(rs.next()) { return rs.getInt(1); } } catch(SQLException sqle) { sqle.printStackTrace(); } return 0; } private int insertForUnfinishedCount(PreparedStatement stmt, int index, int waveformeventid, int refid, int number) { try { stmt.setInt(index++, waveformeventid); stmt.setInt(index++, refid); stmt.setInt(index++, number); } catch(SQLException sqle) { sqle.printStackTrace(); } return index; } public int getChannelDbId(int waveformeventid, int waveformchannelid) { try { getIdStmt.setInt(1, waveformeventid); getIdStmt.setInt(2, waveformchannelid); ResultSet rs = getIdStmt.executeQuery(); if(rs.next()) { return rs.getInt(1); } } catch(SQLException sqle) { sqle.printStackTrace(); } return -1; } public int getFirst() { try { getByStatusStmt.setInt(1, Status.NEW.getId()); ResultSet rs = null; // synchronized(connection) { rs = getByStatusStmt.executeQuery(); // } if(rs.next()) { int rtnValue = rs.getInt(1); updateStatus(rtnValue, Status.PROCESSING); return rtnValue; } } catch(SQLException sqle) { sqle.printStackTrace(); } return -1; } public void updateStatus(int 
waveformid, Status newStatus) { updateStatus(waveformid, newStatus, ""); } public void updateStatus(int waveformid, Status newStatus, String reason) { try { updateStatusStmt.setInt(1, newStatus.getId()); updateStatusStmt.setString(2, reason); updateStatusStmt.setInt(3, waveformid); updateStatusStmt.executeUpdate(); } catch(SQLException sqle) { sqle.printStackTrace(); } } public void updateStatus(Status oldStatus, Status newStatus) { try { statusUpdateStmt.setInt(1, newStatus.getId()); statusUpdateStmt.setInt(2, oldStatus.getId()); statusUpdateStmt.executeUpdate(); } catch(SQLException sqle) { sqle.printStackTrace(); } } public int[] getByStatus(Status status) { ArrayList arrayList = new ArrayList(); try { getByStatusStmt.setInt(1, status.getId()); ResultSet rs = getByStatusStmt.executeQuery(); while(rs.next()) { arrayList.add(new Integer(rs.getInt(1))); } } catch(SQLException sqle) { sqle.printStackTrace(); } int[] rtnValues = new int[arrayList.size()]; for(int counter = 0; counter < arrayList.size(); counter++) { rtnValues[counter] = ((Integer)arrayList.get(counter)).intValue(); } return rtnValues; } public int getWaveformEventId(int dbid) { try { getStmt.setInt(1, dbid); ResultSet rs = getStmt.executeQuery(); if(rs.next()) { return rs.getInt("waveformeventid"); } } catch(SQLException sqle) { sqle.printStackTrace(); } return -1; } public int getWaveformChannelId(int dbid) { try { getStmt.setInt(1, dbid); ResultSet rs = getStmt.executeQuery(); if(rs.next()) { return rs.getInt("waveformchannelid"); } } catch(SQLException sqle) { sqle.printStackTrace(); } return -1; } public void delete(int waveformEventid) { try { deleteByEventIdStmt.setInt(1, waveformEventid); deleteByEventIdStmt.executeUpdate(); } catch(SQLException sqle) { sqle.printStackTrace(); } } public void deleteInfo(int waveformeventid) { try { delInfoStmt.setInt(1, waveformeventid); delInfoStmt.executeUpdate(); } catch(SQLException sqle) { sqle.printStackTrace(); } } public void deleteNetworkInfo(int 
waveformeventid, int networkid) { try { delNetworkInfoStmt.setInt(1, waveformeventid); delNetworkInfoStmt.setInt(2, networkid); delNetworkInfoStmt.executeUpdate(); } catch(SQLException sqle) { sqle.printStackTrace(); } } public void deleteStationInfo(int waveformeventid, int stationid) { try { delStationInfoStmt.setInt(1, waveformeventid); delStationInfoStmt.setInt(2, stationid); delStationInfoStmt.executeUpdate(); } catch(SQLException sqle) { sqle.printStackTrace(); } } public void deleteSiteInfo(int waveformeventid, int siteid) { try { delSiteInfoStmt.setInt(1, waveformeventid); delSiteInfoStmt.setInt(2, siteid); delSiteInfoStmt.executeUpdate(); } catch(SQLException sqle) { sqle.printStackTrace(); } } public void deleteChannelInfo(int waveformeventid, int channelid) { try { delChannelInfoStmt.setInt(1, waveformeventid); delChannelInfoStmt.setInt(2, channelid); delChannelInfoStmt.executeUpdate(); } catch(SQLException sqle) { sqle.printStackTrace(); } } public int[] getIds(int eventid) { ArrayList arrayList = new ArrayList(); try { getIdsStmt.setInt(1, Status.COMPLETE_SUCCESS.getId()); getIdsStmt.setInt(2, Status.COMPLETE_REJECT.getId()); getIdsStmt.setInt(3, eventid); ResultSet rs = getIdsStmt.executeQuery(); while(rs.next()) { arrayList.add(new Integer(rs.getInt(1))); } } catch(SQLException sqle) { sqle.printStackTrace(); } int[] rtnValues = new int[arrayList.size()]; for(int counter = 0; counter < arrayList.size(); counter++) { rtnValues[counter] = ((Integer)arrayList.get(counter)).intValue(); } return rtnValues; } public int[] getIds() { int[] eventids = getUnfinishedEvents(); int[] rtnValues = new int[0]; for(int counter = 0; counter < eventids.length; counter++) { int[] ids = getIds(eventids[counter]); int[] tmp = new int[rtnValues.length + ids.length]; System.arraycopy(rtnValues, 0, tmp, 0, rtnValues.length); System.arraycopy(ids, 0, tmp, rtnValues.length, ids.length); rtnValues = tmp; } return rtnValues; } public int[] getUnfinishedEvents() { ArrayList 
arrayList = new ArrayList(); try { unfinishedEventsStmt.setInt(1, 0); ResultSet rs = unfinishedEventsStmt.executeQuery(); while(rs.next()) { arrayList.add(new Integer(rs.getInt(1))); } } catch(SQLException sqle) { sqle.printStackTrace(); } int[] rtnValues = new int[arrayList.size()]; for(int counter = 0 ; counter< arrayList.size(); counter++) { rtnValues[counter] = ((Integer)arrayList.get(counter)).intValue(); } return rtnValues; } public void delete(String tableName) { try { connection.createStatement().execute(deleteStmt+tableName); } catch(SQLException sqle) { sqle.printStackTrace(); } } public void clean() { delete("waveformdb"); delete("waveformnetworkdb"); delete("waveformstationdb"); delete("waveformsitedb"); delete("waveformchanneldb"); } public Connection getConnection() { return this.connection; } protected Connection connection; private PreparedStatement getStmt; private PreparedStatement getIdStmt; private PreparedStatement getByStatusStmt; private PreparedStatement updateStatusStmt; private PreparedStatement statusUpdateStmt; private PreparedStatement deleteByEventIdStmt; private PreparedStatement putInfoStmt; private PreparedStatement putNetInfoStmt; private PreparedStatement putStationInfoStmt; private PreparedStatement putSiteInfoStmt; private PreparedStatement putChannelInfoStmt; private PreparedStatement channelCountStmt;; private PreparedStatement networkCountStmt; private PreparedStatement stationCountStmt; private PreparedStatement siteCountStmt; private PreparedStatement getNetCountStmt; private PreparedStatement getStationCountStmt; private PreparedStatement getSiteCountStmt; private PreparedStatement getChannelCountStmt; private PreparedStatement unfinishedNetworkCountStmt; private PreparedStatement unfinishedStationCountStmt; private PreparedStatement unfinishedSiteCountStmt; private PreparedStatement unfinishedChannelCountStmt; private PreparedStatement delInfoStmt; private PreparedStatement delNetworkInfoStmt; private PreparedStatement 
delStationInfoStmt; private PreparedStatement delSiteInfoStmt; private PreparedStatement delChannelInfoStmt; private PreparedStatement isInfoIns; private PreparedStatement isNetworkInfoIns; private PreparedStatement isStationInfoIns; private PreparedStatement isSiteInfoIns; private PreparedStatement isChannelInfoIns; private PreparedStatement getIdsStmt; private String deleteStmt; private PreparedStatement unfinishedEventsStmt; static Category logger = Category.getInstance(AbstractWaveformDatabase.class.getName()); }// AbstractWaveformDatabase
true
true
private void init() { try { create(); getStmt = connection.prepareStatement(" SELECT waveformeventid, waveformchannelid "+ " FROM waveformchanneldb WHERE waveformid = ? "); getIdStmt = connection.prepareStatement(" SELECT waveformid FROM "+ " waveformchanneldb "+ " WHERE waveformeventid = ? AND "+ " waveformchannelid =? "); getByStatusStmt = connection.prepareStatement(" SELECT waveformid FROM waveformchanneldb "+ " WHERE status = ? "); updateStatusStmt = connection.prepareStatement(" UPDATE waveformchanneldb "+ " SET status = ? , "+ " reason = ? "+ " WHERE waveformid = ? "); statusUpdateStmt = connection.prepareStatement(" UPDATE waveformchanneldb set status = ? "+ " WHERE status = ? "); deleteByEventIdStmt = connection.prepareStatement(" DELETE FROM waveformchanneldb "+ " WHERE waveformeventid = ? "); putInfoStmt = connection.prepareStatement(" INSERT INTO waveformdb "+ " VALUES(?,?)"); putNetInfoStmt = connection.prepareStatement(" INSERT INTO waveformnetworkdb "+ " VALUES(?, ?, ?, ?) "); putStationInfoStmt = connection.prepareStatement(" INSERT INTO waveformstationdb "+ " VALUES(?, ?, ?, ?, ?)"); putSiteInfoStmt = connection.prepareStatement(" INSERT INTO waveformsitedb "+ " VALUES(?, ?, ?, ?, ?)"); putChannelInfoStmt = connection.prepareStatement(" INSERT INTO waveformchanneldb "+ " (waveformeventid, waveformchannelid, waveformsiteid, "+ " qtime, status, numretrys, reason ) "+ " VALUES(?, ?, ?, ?, ?, ?, ?)"); networkCountStmt = connection.prepareStatement(" UPDATE waveformdb "+ " SET numnetworks = numnetworks + ? "+ " WHERE waveformeventid = ? "); stationCountStmt = connection.prepareStatement(" UPDATE waveformnetworkdb "+ " SET numstations = numstations + ? "+ " WHERE waveformeventid = ? AND "+ " waveformnetworkid = ? "); siteCountStmt = connection.prepareStatement(" UPDATE waveformstationdb "+ " SET numsites = numsites + ? "+ " WHERE waveformeventid = ? AND "+ " waveformstationid = ? 
"); channelCountStmt = connection.prepareStatement(" UPDATE waveformsitedb "+ " SET numchannels = numchannels + ? "+ " WHERE waveformeventid = ? AND "+ " waveformsiteid = ? "); getNetCountStmt = connection.prepareStatement(" SELECT numnetworks from waveformdb "+ " WHERE waveformeventid = ? "); getStationCountStmt = connection.prepareStatement(" SELECT numstations from "+ " waveformnetworkdb " + " WHERE waveformeventid = ? AND "+ " waveformnetworkid = ? "); getSiteCountStmt = connection.prepareStatement(" SELECT numsites from "+ " waveformstationdb "+ " WHERE waveformeventid = ? AND "+ " waveformstationid = ? "); getChannelCountStmt = connection.prepareStatement(" SELECT numchannels from "+ " waveformsitedb "+ " WHERE waveformeventid = ? AND "+ " waveformsiteid = ? "); delInfoStmt = connection.prepareStatement(" DELETE FROM waveformdb "+ " WHERE waveformeventid = ? "); delNetworkInfoStmt = connection.prepareStatement(" DELETE FROM waveformnetworkdb "+ " WHERE waveformeventid = ? AND "+ " waveformnetworkid = ? "); delStationInfoStmt = connection.prepareStatement( " DELETE FROM waveformstationdb "+ " WHERE waveformeventid = ? AND "+ " waveformstationid = ? "); delSiteInfoStmt = connection.prepareStatement(" DELETE FROM waveformsitedb "+ " WHERE waveformeventid = ? AND "+ " waveformsiteid = ? "); delChannelInfoStmt = connection.prepareStatement(" DELETE FROM waveformchanneldb "+ " WHERE waveformeventid = ? AND "+ " waveformchannelid = ? "); isInfoIns = connection.prepareStatement(" SELECT * from waveformdb "+ " WHERE waveformeventid = ? "); isNetworkInfoIns = connection.prepareStatement(" SELECT * from waveformnetworkdb "+ " WHERE waveformeventid = ? AND "+ " waveformnetworkid = ? "); isStationInfoIns = connection.prepareStatement(" SELECT * from waveformstationdb "+ " WHERE waveformeventid = ? AND "+ " waveformstationid = ? "); isSiteInfoIns = connection.prepareStatement(" SELECT * from waveformsitedb "+ " WHERE waveformeventid = ? AND "+ " waveformsiteid = ? 
" ); isChannelInfoIns = connection.prepareStatement(" SELECT * from waveformchanneldb "+ " WHERE waveformeventid = ? AND "+ " waveformchannelid = ? "); getIdsStmt = connection.prepareStatement(" SELECT waveformid from "+ " waveformchanneldb "+ " WHERE ( status = ? OR "+ " status = ? ) AND "+ " waveformeventid = ? "); unfinishedEventsStmt = connection.prepareStatement("SELECT waveformeventid FROM "+ " waveformdb WHERE "+ " numNetworks > ? "); unfinishedNetworkCountStmt = connection.prepareStatement("SELECT count(*) FROM "+ " waveformnetworkdb WHERE "+ " waveformeventid = ? AND "+ " waveformeventid = ? AND "+ " numStations > ? "); unfinishedStationCountStmt = connection.prepareStatement("SELECT count(*) FROM "+ " waveformstationdb WHERE "+ " waveformeventid = ? AND "+ " waveformnetworkid = ? AND "+ " numSites > ?"); unfinishedSiteCountStmt = connection.prepareStatement("SELECT count(*) FROM "+ " waveformsitedb WHERE "+ " waveformeventid = ? AND "+ " waveformstationid = ? AND "+ " numchannels > ? "); unfinishedChannelCountStmt = connection.prepareStatement("SELECT count(*) FROM "+ " waveformchanneldb WHERE "+ " waveformeventid = ? AND "+ " waveformsiteid = ? AND "+ " (status = 0 OR status = 1) "); deleteStmt = "DELETE FROM "; } catch(SQLException sqle) { sqle.printStackTrace(); } }
private void init() { try { create(); getStmt = connection.prepareStatement(" SELECT waveformeventid, waveformchannelid "+ " FROM waveformchanneldb WHERE waveformid = ? "); getIdStmt = connection.prepareStatement(" SELECT waveformid FROM "+ " waveformchanneldb "+ " WHERE waveformeventid = ? AND "+ " waveformchannelid =? "); getByStatusStmt = connection.prepareStatement(" SELECT waveformid FROM waveformchanneldb "+ " WHERE status = ? ORDER BY waveformeventid"); updateStatusStmt = connection.prepareStatement(" UPDATE waveformchanneldb "+ " SET status = ? , "+ " reason = ? "+ " WHERE waveformid = ? "); statusUpdateStmt = connection.prepareStatement(" UPDATE waveformchanneldb set status = ? "+ " WHERE status = ? "); deleteByEventIdStmt = connection.prepareStatement(" DELETE FROM waveformchanneldb "+ " WHERE waveformeventid = ? "); putInfoStmt = connection.prepareStatement(" INSERT INTO waveformdb "+ " VALUES(?,?)"); putNetInfoStmt = connection.prepareStatement(" INSERT INTO waveformnetworkdb "+ " VALUES(?, ?, ?, ?) "); putStationInfoStmt = connection.prepareStatement(" INSERT INTO waveformstationdb "+ " VALUES(?, ?, ?, ?, ?)"); putSiteInfoStmt = connection.prepareStatement(" INSERT INTO waveformsitedb "+ " VALUES(?, ?, ?, ?, ?)"); putChannelInfoStmt = connection.prepareStatement(" INSERT INTO waveformchanneldb "+ " (waveformeventid, waveformchannelid, waveformsiteid, "+ " qtime, status, numretrys, reason ) "+ " VALUES(?, ?, ?, ?, ?, ?, ?)"); networkCountStmt = connection.prepareStatement(" UPDATE waveformdb "+ " SET numnetworks = numnetworks + ? "+ " WHERE waveformeventid = ? "); stationCountStmt = connection.prepareStatement(" UPDATE waveformnetworkdb "+ " SET numstations = numstations + ? "+ " WHERE waveformeventid = ? AND "+ " waveformnetworkid = ? "); siteCountStmt = connection.prepareStatement(" UPDATE waveformstationdb "+ " SET numsites = numsites + ? "+ " WHERE waveformeventid = ? AND "+ " waveformstationid = ? 
"); channelCountStmt = connection.prepareStatement(" UPDATE waveformsitedb "+ " SET numchannels = numchannels + ? "+ " WHERE waveformeventid = ? AND "+ " waveformsiteid = ? "); getNetCountStmt = connection.prepareStatement(" SELECT numnetworks from waveformdb "+ " WHERE waveformeventid = ? "); getStationCountStmt = connection.prepareStatement(" SELECT numstations from "+ " waveformnetworkdb " + " WHERE waveformeventid = ? AND "+ " waveformnetworkid = ? "); getSiteCountStmt = connection.prepareStatement(" SELECT numsites from "+ " waveformstationdb "+ " WHERE waveformeventid = ? AND "+ " waveformstationid = ? "); getChannelCountStmt = connection.prepareStatement(" SELECT numchannels from "+ " waveformsitedb "+ " WHERE waveformeventid = ? AND "+ " waveformsiteid = ? "); delInfoStmt = connection.prepareStatement(" DELETE FROM waveformdb "+ " WHERE waveformeventid = ? "); delNetworkInfoStmt = connection.prepareStatement(" DELETE FROM waveformnetworkdb "+ " WHERE waveformeventid = ? AND "+ " waveformnetworkid = ? "); delStationInfoStmt = connection.prepareStatement( " DELETE FROM waveformstationdb "+ " WHERE waveformeventid = ? AND "+ " waveformstationid = ? "); delSiteInfoStmt = connection.prepareStatement(" DELETE FROM waveformsitedb "+ " WHERE waveformeventid = ? AND "+ " waveformsiteid = ? "); delChannelInfoStmt = connection.prepareStatement(" DELETE FROM waveformchanneldb "+ " WHERE waveformeventid = ? AND "+ " waveformchannelid = ? "); isInfoIns = connection.prepareStatement(" SELECT * from waveformdb "+ " WHERE waveformeventid = ? "); isNetworkInfoIns = connection.prepareStatement(" SELECT * from waveformnetworkdb "+ " WHERE waveformeventid = ? AND "+ " waveformnetworkid = ? "); isStationInfoIns = connection.prepareStatement(" SELECT * from waveformstationdb "+ " WHERE waveformeventid = ? AND "+ " waveformstationid = ? "); isSiteInfoIns = connection.prepareStatement(" SELECT * from waveformsitedb "+ " WHERE waveformeventid = ? AND "+ " waveformsiteid = ? 
" ); isChannelInfoIns = connection.prepareStatement(" SELECT * from waveformchanneldb "+ " WHERE waveformeventid = ? AND "+ " waveformchannelid = ? "); getIdsStmt = connection.prepareStatement(" SELECT waveformid from "+ " waveformchanneldb "+ " WHERE ( status = ? OR "+ " status = ? ) AND "+ " waveformeventid = ? "); unfinishedEventsStmt = connection.prepareStatement("SELECT waveformeventid FROM "+ " waveformdb WHERE "+ " numNetworks > ? "); unfinishedNetworkCountStmt = connection.prepareStatement("SELECT count(*) FROM "+ " waveformnetworkdb WHERE "+ " waveformeventid = ? AND "+ " waveformeventid = ? AND "+ " numStations > ? "); unfinishedStationCountStmt = connection.prepareStatement("SELECT count(*) FROM "+ " waveformstationdb WHERE "+ " waveformeventid = ? AND "+ " waveformnetworkid = ? AND "+ " numSites > ?"); unfinishedSiteCountStmt = connection.prepareStatement("SELECT count(*) FROM "+ " waveformsitedb WHERE "+ " waveformeventid = ? AND "+ " waveformstationid = ? AND "+ " numchannels > ? "); unfinishedChannelCountStmt = connection.prepareStatement("SELECT count(*) FROM "+ " waveformchanneldb WHERE "+ " waveformeventid = ? AND "+ " waveformsiteid = ? AND "+ " (status = 0 OR status = 1) "); deleteStmt = "DELETE FROM "; } catch(SQLException sqle) { sqle.printStackTrace(); } }
diff --git a/src/cz/zcu/kiv/eeg/mobile/base/ws/asynctask/FetchExperiments.java b/src/cz/zcu/kiv/eeg/mobile/base/ws/asynctask/FetchExperiments.java index 55f53a5..321114c 100644 --- a/src/cz/zcu/kiv/eeg/mobile/base/ws/asynctask/FetchExperiments.java +++ b/src/cz/zcu/kiv/eeg/mobile/base/ws/asynctask/FetchExperiments.java @@ -1,126 +1,126 @@ package cz.zcu.kiv.eeg.mobile.base.ws.asynctask; import android.content.SharedPreferences; import android.util.Log; import cz.zcu.kiv.eeg.mobile.base.R; import cz.zcu.kiv.eeg.mobile.base.archetypes.CommonActivity; import cz.zcu.kiv.eeg.mobile.base.archetypes.CommonService; import cz.zcu.kiv.eeg.mobile.base.data.Values; import cz.zcu.kiv.eeg.mobile.base.data.adapter.ExperimentAdapter; import cz.zcu.kiv.eeg.mobile.base.data.container.xml.Experiment; import cz.zcu.kiv.eeg.mobile.base.data.container.xml.ExperimentList; import cz.zcu.kiv.eeg.mobile.base.data.container.xml.RecordCount; import cz.zcu.kiv.eeg.mobile.base.ws.ssl.SSLSimpleClientHttpRequestFactory; import org.springframework.http.*; import org.springframework.http.converter.xml.SimpleXmlHttpMessageConverter; import org.springframework.web.client.RestTemplate; import java.util.Collections; import java.util.Comparator; import java.util.List; import static cz.zcu.kiv.eeg.mobile.base.data.ServiceState.*; /** * Common service (Asynctask) for fetching experiments from server. * * @author Petr Miko */ public class FetchExperiments extends CommonService<Void, Void, List<Experiment>> { private static final String TAG = FetchExperiments.class.getSimpleName(); private ExperimentAdapter experimentAdapter; private String qualifier; /** * Constructor. 
* * @param activity parent activity * @param experimentAdapter adapter for holding collection of experiments * @param qualifier qualifier to distinguish whether to fetch private or public data */ public FetchExperiments(CommonActivity activity, ExperimentAdapter experimentAdapter, String qualifier) { super(activity); this.experimentAdapter = experimentAdapter; this.qualifier = qualifier; } /** * Method, where all experiments are read from server. * All heavy lifting is made here. * * @param params not used (omitted) here * @return list of fetched experiments */ @Override protected List<Experiment> doInBackground(Void... params) { SharedPreferences credentials = getCredentials(); String username = credentials.getString("username", null); String password = credentials.getString("password", null); String url = credentials.getString("url", null) + Values.SERVICE_EXPERIMENTS; setState(RUNNING, R.string.working_ws_experiments); HttpAuthentication authHeader = new HttpBasicAuthentication(username, password); HttpHeaders requestHeaders = new HttpHeaders(); requestHeaders.setAuthorization(authHeader); requestHeaders.setAccept(Collections.singletonList(MediaType.APPLICATION_XML)); HttpEntity<Object> entity = new HttpEntity<Object>(requestHeaders); SSLSimpleClientHttpRequestFactory factory = new SSLSimpleClientHttpRequestFactory(); // Create a new RestTemplate instance RestTemplate restTemplate = new RestTemplate(factory); restTemplate.getMessageConverters().add(new SimpleXmlHttpMessageConverter()); + try { - //obtain all public records if qualifier is all - if (Values.SERVICE_QUALIFIER_ALL.equals(qualifier)) { - String countUrl = url + "count"; - ResponseEntity<RecordCount> count = restTemplate.exchange(countUrl, HttpMethod.GET, entity, RecordCount.class); + //obtain all public records if qualifier is all + if (Values.SERVICE_QUALIFIER_ALL.equals(qualifier)) { + String countUrl = url + "count"; + ResponseEntity<RecordCount> count = restTemplate.exchange(countUrl, 
HttpMethod.GET, entity, RecordCount.class); - url += "public/" + count.getBody().getPublicRecords(); - } else - url += qualifier; + url += "public/" + count.getBody().getPublicRecords(); + } else + url += qualifier; - try { // Make the network request Log.d(TAG, url); ResponseEntity<ExperimentList> response = restTemplate.exchange(url, HttpMethod.GET, entity, ExperimentList.class); ExperimentList body = response.getBody(); if (body != null) { return body.getExperiments(); } } catch (Exception e) { Log.e(TAG, e.getLocalizedMessage(), e); setState(ERROR, e); } finally { setState(DONE); } return Collections.emptyList(); } /** * Read experiments are assigned to adapter here. * * @param resultList experiments fetched from server */ @Override protected void onPostExecute(List<Experiment> resultList) { experimentAdapter.clear(); if (resultList != null && !resultList.isEmpty()) { Collections.sort(resultList, new Comparator<Experiment>() { @Override public int compare(Experiment lhs, Experiment rhs) { return lhs.getExperimentId() - rhs.getExperimentId(); } }); for (Experiment res : resultList) { experimentAdapter.add(res); } } } }
false
true
protected List<Experiment> doInBackground(Void... params) { SharedPreferences credentials = getCredentials(); String username = credentials.getString("username", null); String password = credentials.getString("password", null); String url = credentials.getString("url", null) + Values.SERVICE_EXPERIMENTS; setState(RUNNING, R.string.working_ws_experiments); HttpAuthentication authHeader = new HttpBasicAuthentication(username, password); HttpHeaders requestHeaders = new HttpHeaders(); requestHeaders.setAuthorization(authHeader); requestHeaders.setAccept(Collections.singletonList(MediaType.APPLICATION_XML)); HttpEntity<Object> entity = new HttpEntity<Object>(requestHeaders); SSLSimpleClientHttpRequestFactory factory = new SSLSimpleClientHttpRequestFactory(); // Create a new RestTemplate instance RestTemplate restTemplate = new RestTemplate(factory); restTemplate.getMessageConverters().add(new SimpleXmlHttpMessageConverter()); //obtain all public records if qualifier is all if (Values.SERVICE_QUALIFIER_ALL.equals(qualifier)) { String countUrl = url + "count"; ResponseEntity<RecordCount> count = restTemplate.exchange(countUrl, HttpMethod.GET, entity, RecordCount.class); url += "public/" + count.getBody().getPublicRecords(); } else url += qualifier; try { // Make the network request Log.d(TAG, url); ResponseEntity<ExperimentList> response = restTemplate.exchange(url, HttpMethod.GET, entity, ExperimentList.class); ExperimentList body = response.getBody(); if (body != null) { return body.getExperiments(); } } catch (Exception e) { Log.e(TAG, e.getLocalizedMessage(), e); setState(ERROR, e); } finally { setState(DONE); } return Collections.emptyList(); }
protected List<Experiment> doInBackground(Void... params) { SharedPreferences credentials = getCredentials(); String username = credentials.getString("username", null); String password = credentials.getString("password", null); String url = credentials.getString("url", null) + Values.SERVICE_EXPERIMENTS; setState(RUNNING, R.string.working_ws_experiments); HttpAuthentication authHeader = new HttpBasicAuthentication(username, password); HttpHeaders requestHeaders = new HttpHeaders(); requestHeaders.setAuthorization(authHeader); requestHeaders.setAccept(Collections.singletonList(MediaType.APPLICATION_XML)); HttpEntity<Object> entity = new HttpEntity<Object>(requestHeaders); SSLSimpleClientHttpRequestFactory factory = new SSLSimpleClientHttpRequestFactory(); // Create a new RestTemplate instance RestTemplate restTemplate = new RestTemplate(factory); restTemplate.getMessageConverters().add(new SimpleXmlHttpMessageConverter()); try { //obtain all public records if qualifier is all if (Values.SERVICE_QUALIFIER_ALL.equals(qualifier)) { String countUrl = url + "count"; ResponseEntity<RecordCount> count = restTemplate.exchange(countUrl, HttpMethod.GET, entity, RecordCount.class); url += "public/" + count.getBody().getPublicRecords(); } else url += qualifier; // Make the network request Log.d(TAG, url); ResponseEntity<ExperimentList> response = restTemplate.exchange(url, HttpMethod.GET, entity, ExperimentList.class); ExperimentList body = response.getBody(); if (body != null) { return body.getExperiments(); } } catch (Exception e) { Log.e(TAG, e.getLocalizedMessage(), e); setState(ERROR, e); } finally { setState(DONE); } return Collections.emptyList(); }
diff --git a/src/main/java/com/synacor/soa/ark/WildChild.java b/src/main/java/com/synacor/soa/ark/WildChild.java index c18aed8..6582627 100644 --- a/src/main/java/com/synacor/soa/ark/WildChild.java +++ b/src/main/java/com/synacor/soa/ark/WildChild.java @@ -1,115 +1,115 @@ package com.synacor.soa.ark; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.WatchedEvent; import org.apache.zookeeper.Watcher.Event.EventType; import org.apache.zookeeper.Watcher.Event.KeeperState; import com.netflix.curator.framework.CuratorFramework; import com.netflix.curator.framework.api.CuratorWatcher; /** * Sets and re-creates watches as necessary to notify a client of create/delete events for a path that may include wildcards. * The path format is (/name)+ where name may be a regular expression which must not include the '/' character. * examples: * /services/a*b/deployments/1\.*.0/instances/.*-test/lifecycleState * /services/a*b/deployments/1\.*.0/instances/.*-test/autoScaling * /services/a*b/deployments/[1-4]*\.0\.0/instances/.* * usage: * List<String> initialLeaves = new WildChild(client, "/services/*", watcher).getMatchingLeaves(); * * The CuratorWatcher will be called initially for all pre-existing matching leaves, and can be used to build the initial list of matches. 
*/ public class WildChild { CuratorFramework client; private String path; private String wildPath; private String matchCriteria; // the next part of the fullPath private CuratorWatcher leafWatcher; private Set<String> trackedChildren = new HashSet<String>(); public WildChild(CuratorFramework client, String wildPath, CuratorWatcher leafWatcher) throws Exception { this(client, "", wildPath, leafWatcher); } private WildChild(CuratorFramework client, String path, String wildPath, CuratorWatcher leafWatcher) throws Exception { this.client = client; this.path = path; this.wildPath = wildPath; this.leafWatcher = leafWatcher; int index = path.split("/").length; this.matchCriteria = wildPath.split("/")[index]; List<String> children = client.getChildren().usingWatcher(new WildChildWatcher()).forPath(path); for(String child : children) { String childPath = path + "/" + child; boolean childIsLeaf = childPath.split("/").length == wildPath.split("/").length; trackedChildren.add(child); if(childIsLeaf) { WatchedEvent createdEvent = new WatchedEvent(EventType.NodeCreated, KeeperState.SyncConnected, childPath); leafWatcher.process(createdEvent); } else { new WildChild(client, childPath, wildPath, leafWatcher); } } } /** * Watcher class that monitors a node for child changes, * and manages setting watchers on child nodes or notifying the primary watcher. 
*/ private class WildChildWatcher implements CuratorWatcher { private List<String> getChildrenSafe() { try { return client.getChildren().usingWatcher(this).forPath(path); } catch (KeeperException.NoNodeException exc) { return new ArrayList<String>(); } catch (Exception exc) { throw new RuntimeException(exc); } } public void process(WatchedEvent event) throws Exception { - if(!path.equals(path)) throw new RuntimeException("incorrect path"); + if(!path.equals(event.getPath())) throw new RuntimeException("incorrect path"); boolean childIsLeaf = wildPath.split("/").length == path.split("/").length+1; if(event.getType() == EventType.NodeChildrenChanged) { List<String> children = getChildrenSafe(); // Remove missing children from tracking list Set<String> trackedCopy = new HashSet<String>(trackedChildren); for(String trackedChild : trackedCopy) { if(!children.contains(trackedChild)) { trackedChildren.remove(trackedChild); if(childIsLeaf) { WatchedEvent deletedEvent = new WatchedEvent(EventType.NodeDeleted, KeeperState.SyncConnected, path + "/" + trackedChild); leafWatcher.process(deletedEvent); } } } // Add new children to tracking list for(String child : children) { if(child.matches(matchCriteria) && !trackedChildren.contains(child)) { String childPath = path + "/" + child; if(childIsLeaf) { WatchedEvent createdEvent = new WatchedEvent(EventType.NodeCreated, KeeperState.SyncConnected, childPath); leafWatcher.process(createdEvent); } else { new WildChild(client, childPath, wildPath, leafWatcher); } trackedChildren.add(child); } } } } } }
true
true
public void process(WatchedEvent event) throws Exception { if(!path.equals(path)) throw new RuntimeException("incorrect path"); boolean childIsLeaf = wildPath.split("/").length == path.split("/").length+1; if(event.getType() == EventType.NodeChildrenChanged) { List<String> children = getChildrenSafe(); // Remove missing children from tracking list Set<String> trackedCopy = new HashSet<String>(trackedChildren); for(String trackedChild : trackedCopy) { if(!children.contains(trackedChild)) { trackedChildren.remove(trackedChild); if(childIsLeaf) { WatchedEvent deletedEvent = new WatchedEvent(EventType.NodeDeleted, KeeperState.SyncConnected, path + "/" + trackedChild); leafWatcher.process(deletedEvent); } } } // Add new children to tracking list for(String child : children) { if(child.matches(matchCriteria) && !trackedChildren.contains(child)) { String childPath = path + "/" + child; if(childIsLeaf) { WatchedEvent createdEvent = new WatchedEvent(EventType.NodeCreated, KeeperState.SyncConnected, childPath); leafWatcher.process(createdEvent); } else { new WildChild(client, childPath, wildPath, leafWatcher); } trackedChildren.add(child); } } } }
public void process(WatchedEvent event) throws Exception { if(!path.equals(event.getPath())) throw new RuntimeException("incorrect path"); boolean childIsLeaf = wildPath.split("/").length == path.split("/").length+1; if(event.getType() == EventType.NodeChildrenChanged) { List<String> children = getChildrenSafe(); // Remove missing children from tracking list Set<String> trackedCopy = new HashSet<String>(trackedChildren); for(String trackedChild : trackedCopy) { if(!children.contains(trackedChild)) { trackedChildren.remove(trackedChild); if(childIsLeaf) { WatchedEvent deletedEvent = new WatchedEvent(EventType.NodeDeleted, KeeperState.SyncConnected, path + "/" + trackedChild); leafWatcher.process(deletedEvent); } } } // Add new children to tracking list for(String child : children) { if(child.matches(matchCriteria) && !trackedChildren.contains(child)) { String childPath = path + "/" + child; if(childIsLeaf) { WatchedEvent createdEvent = new WatchedEvent(EventType.NodeCreated, KeeperState.SyncConnected, childPath); leafWatcher.process(createdEvent); } else { new WildChild(client, childPath, wildPath, leafWatcher); } trackedChildren.add(child); } } } }
diff --git a/server/src/main/java/org/apache/accumulo/server/tabletserver/Tablet.java b/server/src/main/java/org/apache/accumulo/server/tabletserver/Tablet.java index 72c78c3f7..ca0df8d93 100644 --- a/server/src/main/java/org/apache/accumulo/server/tabletserver/Tablet.java +++ b/server/src/main/java/org/apache/accumulo/server/tabletserver/Tablet.java @@ -1,3872 +1,3872 @@ /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.apache.accumulo.server.tabletserver; import java.io.ByteArrayInputStream; import java.io.DataInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.PriorityQueue; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.TreeSet; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.ReentrantLock; import org.apache.accumulo.core.Constants; import org.apache.accumulo.core.client.Connector; import org.apache.accumulo.core.client.IteratorSetting; import org.apache.accumulo.core.client.impl.ScannerImpl; import org.apache.accumulo.core.conf.AccumuloConfiguration; import org.apache.accumulo.core.conf.ConfigurationCopy; import org.apache.accumulo.core.conf.ConfigurationObserver; import org.apache.accumulo.core.conf.Property; import org.apache.accumulo.core.constraints.Violations; import org.apache.accumulo.core.data.ByteSequence; import org.apache.accumulo.core.data.Column; import org.apache.accumulo.core.data.ColumnUpdate; import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.data.KeyExtent; import org.apache.accumulo.core.data.KeyValue; import org.apache.accumulo.core.data.Mutation; import org.apache.accumulo.core.data.Range; import org.apache.accumulo.core.data.Value; import org.apache.accumulo.core.data.thrift.IterInfo; import org.apache.accumulo.core.data.thrift.MapFileInfo; import org.apache.accumulo.core.file.FileOperations; import org.apache.accumulo.core.file.FileSKVIterator; import 
org.apache.accumulo.core.iterators.IterationInterruptedException; import org.apache.accumulo.core.iterators.IteratorEnvironment; import org.apache.accumulo.core.iterators.IteratorUtil; import org.apache.accumulo.core.iterators.IteratorUtil.IteratorScope; import org.apache.accumulo.core.iterators.SortedKeyValueIterator; import org.apache.accumulo.core.iterators.system.ColumnFamilySkippingIterator; import org.apache.accumulo.core.iterators.system.ColumnQualifierFilter; import org.apache.accumulo.core.iterators.system.DeletingIterator; import org.apache.accumulo.core.iterators.system.InterruptibleIterator; import org.apache.accumulo.core.iterators.system.MultiIterator; import org.apache.accumulo.core.iterators.system.SourceSwitchingIterator; import org.apache.accumulo.core.iterators.system.SourceSwitchingIterator.DataSource; import org.apache.accumulo.core.iterators.system.StatsIterator; import org.apache.accumulo.core.iterators.system.VisibilityFilter; import org.apache.accumulo.core.master.thrift.TabletLoadState; import org.apache.accumulo.core.metadata.MetadataTable; import org.apache.accumulo.core.metadata.RootTable; import org.apache.accumulo.core.metadata.schema.DataFileValue; import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection; import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.DataFileColumnFamily; import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.LogColumnFamily; import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.ScanFileColumnFamily; import org.apache.accumulo.core.security.Authorizations; import org.apache.accumulo.core.security.ColumnVisibility; import org.apache.accumulo.core.security.Credentials; import org.apache.accumulo.core.util.CachedConfiguration; import org.apache.accumulo.core.util.LocalityGroupUtil; import org.apache.accumulo.core.util.LocalityGroupUtil.LocalityGroupConfigurationError; import org.apache.accumulo.core.util.MapCounter; 
import org.apache.accumulo.core.util.Pair; import org.apache.accumulo.core.util.UtilWaitThread; import org.apache.accumulo.fate.zookeeper.IZooReaderWriter; import org.apache.accumulo.server.ServerConstants; import org.apache.accumulo.server.client.HdfsZooInstance; import org.apache.accumulo.server.conf.TableConfiguration; import org.apache.accumulo.server.constraints.ConstraintChecker; import org.apache.accumulo.server.fs.FileRef; import org.apache.accumulo.server.fs.VolumeManager; import org.apache.accumulo.server.fs.VolumeManager.FileType; import org.apache.accumulo.server.fs.VolumeManagerImpl; import org.apache.accumulo.server.master.state.TServerInstance; import org.apache.accumulo.server.master.tableOps.CompactRange.CompactionIterators; import org.apache.accumulo.server.problems.ProblemReport; import org.apache.accumulo.server.problems.ProblemReports; import org.apache.accumulo.server.problems.ProblemType; import org.apache.accumulo.server.security.SystemCredentials; import org.apache.accumulo.server.tabletserver.Compactor.CompactionCanceledException; import org.apache.accumulo.server.tabletserver.Compactor.CompactionEnv; import org.apache.accumulo.server.tabletserver.FileManager.ScanFileManager; import org.apache.accumulo.server.tabletserver.InMemoryMap.MemoryIterator; import org.apache.accumulo.server.tabletserver.TabletServer.TservConstraintEnv; import org.apache.accumulo.server.tabletserver.TabletServerResourceManager.TabletResourceManager; import org.apache.accumulo.server.tabletserver.TabletStatsKeeper.Operation; import org.apache.accumulo.server.tabletserver.compaction.CompactionPlan; import org.apache.accumulo.server.tabletserver.compaction.CompactionStrategy; import org.apache.accumulo.server.tabletserver.compaction.DefaultCompactionStrategy; import org.apache.accumulo.server.tabletserver.compaction.MajorCompactionReason; import org.apache.accumulo.server.tabletserver.compaction.MajorCompactionRequest; import 
org.apache.accumulo.server.tabletserver.compaction.WriteParameters;
import org.apache.accumulo.server.tabletserver.log.DfsLogger;
import org.apache.accumulo.server.tabletserver.log.MutationReceiver;
import org.apache.accumulo.server.tabletserver.mastermessage.TabletStatusMessage;
import org.apache.accumulo.server.tabletserver.metrics.TabletServerMinCMetrics;
import org.apache.accumulo.server.util.FileUtil;
import org.apache.accumulo.server.util.MetadataTableUtil;
import org.apache.accumulo.server.util.MetadataTableUtil.LogEntry;
import org.apache.accumulo.server.util.TabletOperations;
import org.apache.accumulo.server.zookeeper.ZooReaderWriter;
import org.apache.accumulo.start.classloader.vfs.AccumuloVFSClassLoader;
import org.apache.accumulo.trace.instrument.Span;
import org.apache.accumulo.trace.instrument.Trace;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.binary.Hex;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.log4j.Logger;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.KeeperException.NoNodeException;

/*
 * We need to be able to have the master tell a tabletServer to
 * close this file, and the tablet server to handle all pending client reads
 * before closing
 *
 */

/**
 *
 * this class just provides an interface to read from a MapFile mostly takes care of reporting start and end keys
 *
 * need this because a single row extent can have multiple columns this manages all the columns (each handled by a store) for a single row-extent
 *
 *
 */
public class Tablet {
  
  // Why a minor compaction was started: explicit user request, internal
  // (memory pressure) decision, or tablet close.
  enum MinorCompactionReason {
    USER, SYSTEM, CLOSE
  }
  
  /**
   * Tracks mutations committed against one in-memory map. A session is bound to the {@link InMemoryMap} that was current when it was created; the count of
   * in-flight commits lets a minor compaction wait (via the Tablet monitor) until all writers against that map have finished.
   */
  public class CommitSession {
    
    // write-ahead-log sequence number for this session's memtable
    private int seq;
    // the in-memory map mutations of this session are applied to
    private InMemoryMap memTable;
    // number of commits currently in flight against memTable
    private int commitsInProgress;
    // highest timestamp committed through this session; MIN_VALUE means "never set"
    private long maxCommittedTime = Long.MIN_VALUE;
    
    private CommitSession(int seq, InMemoryMap imm) {
      this.seq = seq;
      this.memTable = imm;
      commitsInProgress = 0;
    }
    
    public int getWALogSeq() {
      return seq;
    }
    
    // Caller must hold the Tablet monitor: notifies waiters (e.g. waitForCommitsToFinish)
    // when the last in-flight commit completes.
    private void decrementCommitsInProgress() {
      if (commitsInProgress < 1)
        throw new IllegalStateException("commitsInProgress = " + commitsInProgress);
      
      commitsInProgress--;
      if (commitsInProgress == 0)
        Tablet.this.notifyAll();
    }
    
    private void incrementCommitsInProgress() {
      if (commitsInProgress < 0)
        throw new IllegalStateException("commitsInProgress = " + commitsInProgress);
      
      commitsInProgress++;
    }
    
    // Polls under the Tablet monitor until every in-flight commit against this
    // session's memtable has finished.
    private void waitForCommitsToFinish() {
      while (commitsInProgress > 0) {
        try {
          Tablet.this.wait(50);
        } catch (InterruptedException e) {
          log.warn(e, e);
        }
      }
    }
    
    public void abortCommit(List<Mutation> value) {
      Tablet.this.abortCommit(this, value);
    }
    
    public void commit(List<Mutation> mutations) {
      Tablet.this.commit(this, mutations);
    }
    
    public Tablet getTablet() {
      return Tablet.this;
    }
    
    public boolean beginUpdatingLogsUsed(ArrayList<DfsLogger> copy, boolean mincFinish) {
      return Tablet.this.beginUpdatingLogsUsed(memTable, copy, mincFinish);
    }
    
    public void finishUpdatingLogsUsed() {
      Tablet.this.finishUpdatingLogsUsed();
    }
    
    public int getLogId() {
      return logId;
    }
    
    public KeyExtent getExtent() {
      return extent;
    }
    
    private void updateMaxCommittedTime(long time) {
      maxCommittedTime = Math.max(time, maxCommittedTime);
    }
    
    private long getMaxCommittedTime() {
      if (maxCommittedTime == Long.MIN_VALUE)
        throw new IllegalStateException("Tried to read max committed time when it was never set");
      return maxCommittedTime;
    }
    
  }
  
  /**
   * Manages the tablet's in-memory maps through the minor compaction life cycle: the active map receiving writes, the map being minor compacted
   * (otherMemTable), and the map whose memory is being released (deletingMemTable). State transitions are driven under the Tablet monitor.
   */
  private class TabletMemory {
    // map currently receiving writes
    private InMemoryMap memTable;
    // map frozen for an in-progress minor compaction, or null
    private InMemoryMap otherMemTable;
    // map whose memory is being reclaimed after a minor compaction, or null
    private InMemoryMap deletingMemTable;
    // WAL sequence counter; bumped by 2 per new memtable so minor-compaction
    // events can use the odd/even slot in between (see getWALogSeq() + 2 usage)
    private int nextSeq = 1;
    private CommitSession commitSession;
    
    TabletMemory() {
      try {
        memTable = new InMemoryMap(acuTableConf);
      } catch (LocalityGroupConfigurationError e) {
        throw new RuntimeException(e);
      }
      commitSession = new CommitSession(nextSeq, memTable);
      nextSeq += 2;
    }
    
    InMemoryMap getMemTable() {
      return memTable;
    }
    
    InMemoryMap getMinCMemTable() {
      return otherMemTable;
    }
    
    // Freezes the active map for minor compaction and installs a fresh one.
    // Returns the commit session of the frozen map so the caller can wait for
    // its in-flight commits.
    CommitSession prepareForMinC() {
      if (otherMemTable != null) {
        throw new IllegalStateException();
      }
      
      if (deletingMemTable != null) {
        throw new IllegalStateException();
      }
      
      otherMemTable = memTable;
      try {
        memTable = new InMemoryMap(acuTableConf);
      } catch (LocalityGroupConfigurationError e) {
        throw new RuntimeException(e);
      }
      
      CommitSession oldCommitSession = commitSession;
      commitSession = new CommitSession(nextSeq, memTable);
      nextSeq += 2;
      
      tabletResources.updateMemoryUsageStats(memTable.estimatedSizeInBytes(), otherMemTable.estimatedSizeInBytes());
      
      return oldCommitSession;
    }
    
    // Moves the compacted map into the deleting state and wakes any waiters.
    void finishedMinC() {
      
      if (otherMemTable == null) {
        throw new IllegalStateException();
      }
      
      if (deletingMemTable != null) {
        throw new IllegalStateException();
      }
      
      deletingMemTable = otherMemTable;
      
      otherMemTable = null;
      Tablet.this.notifyAll();
    }
    
    // Releases the memory of the deleting map; delete() is called outside the
    // Tablet monitor, only the bookkeeping afterwards is synchronized.
    void finalizeMinC() {
      try {
        deletingMemTable.delete(15000);
      } finally {
        synchronized (Tablet.this) {
          if (otherMemTable != null) {
            throw new IllegalStateException();
          }
          
          if (deletingMemTable == null) {
            throw new IllegalStateException();
          }
          
          deletingMemTable = null;
          
          tabletResources.updateMemoryUsageStats(memTable.estimatedSizeInBytes(), 0);
        }
      }
    }
    
    boolean memoryReservedForMinC() {
      return otherMemTable != null || deletingMemTable != null;
    }
    
    // Polls under the Tablet monitor until no minor compaction holds memory.
    void waitForMinC() {
      while (otherMemTable != null || deletingMemTable != null) {
        try {
          Tablet.this.wait(50);
        } catch (InterruptedException e) {
          log.warn(e, e);
        }
      }
    }
    
    // Applies mutations to the map the commit session was created against —
    // NOT necessarily the currently active map.
    void mutate(CommitSession cm, List<Mutation> mutations) {
      cm.memTable.mutate(mutations);
    }
    
    void updateMemoryUsageStats() {
      long other = 0;
      if (otherMemTable != null)
        other = otherMemTable.estimatedSizeInBytes();
      else if (deletingMemTable != null)
        other = deletingMemTable.estimatedSizeInBytes();
      
      tabletResources.updateMemoryUsageStats(memTable.estimatedSizeInBytes(), other);
    }
    
    // Iterators over the active map and, if present, the map being compacted.
    // Caller must hand them back via returnIterators().
    List<MemoryIterator> getIterators() {
      List<MemoryIterator> toReturn = new ArrayList<MemoryIterator>(2);
      toReturn.add(memTable.skvIterator());
      if (otherMemTable != null)
        toReturn.add(otherMemTable.skvIterator());
      return toReturn;
    }
    
    void returnIterators(List<MemoryIterator> iters) {
      for (MemoryIterator iter : iters) {
        iter.close();
      }
    }
    
    public long getNumEntries() {
      if (otherMemTable != null)
        return memTable.getNumEntries() + otherMemTable.getNumEntries();
      return memTable.getNumEntries();
    }
    
    CommitSession getCommitSession() {
      return commitSession;
    }
  }
  
  private TabletMemory tabletMemory;
  
  private final TabletTime tabletTime;
  // last logical time written to the metadata table; guarded by timeLock
  private long persistedTime;
  private final Object timeLock = new Object();
  
  private final Path location; // absolute path of this tablets dir
  private TServerInstance lastLocation;
  private Configuration conf;
  private VolumeManager fs;
  
  private TableConfiguration acuTableConf;
  
  private volatile boolean tableDirChecked = false;
  
  // bumped whenever the set of files/memory backing scans changes, so open
  // scan data sources can detect staleness
  private AtomicLong dataSourceDeletions = new AtomicLong(0);
  private Set<ScanDataSource> activeScans = new HashSet<ScanDataSource>();
  
  private volatile boolean closing = false;
  private boolean closed = false;
  private boolean closeComplete = false;
  
  private long lastFlushID = -1;
  private long lastCompactID = -1;
  
  private KeyExtent extent;
  
  private TabletResourceManager tabletResources;
  final private DatafileManager datafileManager;
  private volatile boolean majorCompactionInProgress = false;
  private volatile boolean majorCompactionWaitingToStart = false;
  private Set<MajorCompactionReason> majorCompactionQueued = Collections.synchronizedSet(EnumSet.noneOf(MajorCompactionReason.class));
  private volatile boolean minorCompactionInProgress = false;
  private volatile boolean minorCompactionWaitingToStart = false;
  private boolean updatingFlushID = false;
  
  private AtomicReference<ConstraintChecker> constraintChecker = new AtomicReference<ConstraintChecker>();
  
  private final String tabletDirectory;
  
  private int writesInProgress = 0;
  
  private static final Logger log = Logger.getLogger(Tablet.class);
  public TabletStatsKeeper timer;
  
  private Rate queryRate = new Rate(0.2);
  private long queryCount = 0;
  
  private Rate queryByteRate = new Rate(0.2);
  private long
queryBytes = 0;
  
  private Rate ingestRate = new Rate(0.2);
  private long ingestCount = 0;
  
  private Rate ingestByteRate = new Rate(0.2);
  private long ingestBytes = 0;
  
  private byte[] defaultSecurityLabel = new byte[0];
  
  private long lastMinorCompactionFinishTime;
  private long lastMapFileImportTime;
  
  private volatile long numEntries;
  private volatile long numEntriesInMemory;
  
  // a count of the amount of data read by the iterators
  private AtomicLong scannedCount = new AtomicLong(0);
  private Rate scannedRate = new Rate(0.2);
  
  private ConfigurationObserver configObserver;
  
  private TabletServer tabletServer;
  
  private final int logId;
  // ensure we only have one reader/writer of our bulk file notes at a time
  public final Object bulkFileImportLock = new Object();
  
  public int getLogId() {
    return logId;
  }
  
  // Thrown to scan/update callers when an operation races with tablet close.
  public static class TabletClosedException extends RuntimeException {
    public TabletClosedException(Exception e) {
      super(e);
    }
    
    public TabletClosedException() {
      super();
    }
    
    private static final long serialVersionUID = 1L;
  }
  
  /**
   * Allocates the next unique file name in this tablet's directory, using the configured file extension and the cluster-wide unique name allocator.
   */
  FileRef getNextMapFilename(String prefix) throws IOException {
    String extension = FileOperations.getNewFileExtension(tabletServer.getTableConfiguration(extent));
    checkTabletDir();
    return new FileRef(location.toString() + "/" + prefix + UniqueNameAllocator.getInstance().getNextName() + "." + extension);
  }
  
  // Lazily ensures the tablet directory exists; checked at most once per Tablet.
  private void checkTabletDir() throws IOException {
    if (!tableDirChecked) {
      checkTabletDir(this.location);
      tableDirChecked = true;
    }
  }
  
  // Creates the tablet directory if listing it shows it does not exist.
  private void checkTabletDir(Path tabletDir) throws IOException {
    
    FileStatus[] files = null;
    try {
      files = fs.listStatus(tabletDir);
    } catch (FileNotFoundException ex) {
      // ignored
    }
    
    if (files == null) {
      if (tabletDir.getName().startsWith("c-"))
        log.debug("Tablet " + extent + " had no dir, creating " + tabletDir); // its a clone dir...
      else
        log.warn("Tablet " + extent + " had no dir, creating " + tabletDir);
      
      fs.mkdirs(tabletDir);
    }
  }
  
  /**
   * Tracks the set of data files backing this tablet and their sizes, plus the scan reservations that keep files alive while scans read them. All mutation of
   * this state happens while holding the Tablet monitor.
   */
  class DatafileManager {
    // access to datafilesizes needs to be synchronized: see CompactionRunner#getNumFiles
    final private Map<FileRef,DataFileValue> datafileSizes = Collections.synchronizedMap(new TreeMap<FileRef,DataFileValue>());
    
    DatafileManager(SortedMap<FileRef,DataFileValue> datafileSizes) {
      for (Entry<FileRef,DataFileValue> datafiles : datafileSizes.entrySet())
        this.datafileSizes.put(datafiles.getKey(), datafiles.getValue());
    }
    
    // file reserved by an in-progress merging minor compaction, or null
    FileRef mergingMinorCompactionFile = null;
    // files whose metadata delete must wait until no scan references them
    Set<FileRef> filesToDeleteAfterScan = new HashSet<FileRef>();
    // reservation id -> files pinned by that scan reservation
    Map<Long,Set<FileRef>> scanFileReservations = new HashMap<Long,Set<FileRef>>();
    // per-file count of outstanding scan references
    MapCounter<FileRef> fileScanReferenceCounts = new MapCounter<FileRef>();
    long nextScanReservationId = 0;
    // true while waitForScansToFinish is blocking new reservations
    boolean reservationsBlocked = false;
    
    // files currently reserved by a major compaction
    Set<FileRef> majorCompactingFiles = new HashSet<FileRef>();
    
    /**
     * Pins the current file set for a scan and returns a reservation id plus a snapshot of file -> size. Blocks while reservations are disabled by
     * waitForScansToFinish.
     */
    Pair<Long,Map<FileRef,DataFileValue>> reserveFilesForScan() {
      synchronized (Tablet.this) {
        
        while (reservationsBlocked) {
          try {
            Tablet.this.wait(50);
          } catch (InterruptedException e) {
            log.warn(e, e);
          }
        }
        
        Set<FileRef> absFilePaths = new HashSet<FileRef>(datafileSizes.keySet());
        
        long rid = nextScanReservationId++;
        
        scanFileReservations.put(rid, absFilePaths);
        
        Map<FileRef,DataFileValue> ret = new HashMap<FileRef,DataFileValue>();
        
        for (FileRef path : absFilePaths) {
          fileScanReferenceCounts.increment(path, 1);
          ret.put(path, datafileSizes.get(path));
        }
        
        return new Pair<Long,Map<FileRef,DataFileValue>>(rid, ret);
      }
    }
    
    /**
     * Releases a scan reservation; when a file's reference count drops to zero and it was queued for deletion, its scan refs are removed from the metadata
     * table (outside the Tablet monitor).
     */
    void returnFilesForScan(Long reservationId) {
      
      final Set<FileRef> filesToDelete = new HashSet<FileRef>();
      
      synchronized (Tablet.this) {
        Set<FileRef> absFilePaths = scanFileReservations.remove(reservationId);
        
        if (absFilePaths == null)
          throw new IllegalArgumentException("Unknown scan reservation id " + reservationId);
        
        boolean notify = false;
        for (FileRef path : absFilePaths) {
          long refCount = fileScanReferenceCounts.decrement(path, 1);
          if (refCount == 0) {
            if (filesToDeleteAfterScan.remove(path))
              filesToDelete.add(path);
            notify = true;
          } else if (refCount < 0)
            throw new IllegalStateException("Scan ref count for " + path + " is " + refCount);
        }
        
        if (notify)
          Tablet.this.notifyAll();
      }
      
      if (filesToDelete.size() > 0) {
        log.debug("Removing scan refs from metadata " + extent + " " + filesToDelete);
        MetadataTableUtil.removeScanFiles(extent, filesToDelete, SystemCredentials.get(), tabletServer.getLock());
      }
    }
    
    // Removes scan refs for files no scan is using; files still referenced are
    // queued in filesToDeleteAfterScan for returnFilesForScan to handle later.
    private void removeFilesAfterScan(Set<FileRef> scanFiles) {
      if (scanFiles.size() == 0)
        return;
      
      Set<FileRef> filesToDelete = new HashSet<FileRef>();
      
      synchronized (Tablet.this) {
        for (FileRef path : scanFiles) {
          if (fileScanReferenceCounts.get(path) == 0)
            filesToDelete.add(path);
          else
            filesToDeleteAfterScan.add(path);
        }
      }
      
      if (filesToDelete.size() > 0) {
        log.debug("Removing scan refs from metadata " + extent + " " + filesToDelete);
        MetadataTableUtil.removeScanFiles(extent, filesToDelete, SystemCredentials.get(), tabletServer.getLock());
      }
    }
    
    /**
     * Waits up to maxWaitTime for scans referencing the given files to finish; optionally blocks new scan reservations while waiting. Returns the files still
     * in use when the wait ends.
     */
    private TreeSet<FileRef> waitForScansToFinish(Set<FileRef> pathsToWaitFor, boolean blockNewScans, long maxWaitTime) {
      long startTime = System.currentTimeMillis();
      TreeSet<FileRef> inUse = new TreeSet<FileRef>();
      
      Span waitForScans = Trace.start("waitForScans");
      try {
        synchronized (Tablet.this) {
          if (blockNewScans) {
            if (reservationsBlocked)
              throw new IllegalStateException();
            
            reservationsBlocked = true;
          }
          
          for (FileRef path : pathsToWaitFor) {
            while (fileScanReferenceCounts.get(path) > 0 && System.currentTimeMillis() - startTime < maxWaitTime) {
              try {
                Tablet.this.wait(100);
              } catch (InterruptedException e) {
                log.warn(e, e);
              }
            }
          }
          
          for (FileRef path : pathsToWaitFor) {
            if (fileScanReferenceCounts.get(path) > 0)
              inUse.add(path);
          }
          
          if (blockNewScans) {
            reservationsBlocked = false;
            Tablet.this.notifyAll();
          }
        }
      } finally {
        waitForScans.stop();
      }
      return inUse;
    }
    
    /**
     * Brings bulk-loaded files online: validates they live in this table's directory, records them in the metadata table (optionally assigning a bulk load
     * time), and adds them to the in-memory file set.
     */
    public void importMapFiles(long tid, Map<FileRef,DataFileValue> pathsString, boolean setTime) throws IOException {
      
      String bulkDir = null;
Map<FileRef,DataFileValue> paths = new HashMap<FileRef,DataFileValue>();
      for (Entry<FileRef,DataFileValue> entry : pathsString.entrySet())
        paths.put(entry.getKey(), entry.getValue());
      
      // sanity check: every file must live under this table's directory, and
      // all files in one import must come from the same bulk dir
      for (FileRef tpath : paths.keySet()) {
        
        boolean inTheRightDirectory = false;
        Path parent = tpath.path().getParent().getParent();
        for (String tablesDir : ServerConstants.getTablesDirs()) {
          if (parent.equals(new Path(tablesDir, extent.getTableId().toString()))) {
            inTheRightDirectory = true;
            break;
          }
        }
        if (!inTheRightDirectory) {
          throw new IOException("Data file " + tpath + " not in table dirs");
        }
        
        if (bulkDir == null)
          bulkDir = tpath.path().getParent().toString();
        else if (!bulkDir.equals(tpath.path().getParent().toString()))
          throw new IllegalArgumentException("bulk files in different dirs " + bulkDir + " " + tpath);
        
      }
      
      if (extent.isRootTablet()) {
        throw new IllegalArgumentException("Can not import files to root tablet");
      }
      
      synchronized (bulkFileImportLock) {
        Credentials creds = SystemCredentials.get();
        Connector conn;
        try {
          conn = HdfsZooInstance.getInstance().getConnector(creds.getPrincipal(), creds.getToken());
        } catch (Exception ex) {
          throw new IOException(ex);
        }
        // Remove any bulk files we've previously loaded and compacted away.
        // BUG FIX: this previously called paths.keySet().remove(file.path()),
        // which can never remove anything — the set holds FileRef elements and
        // a Path is never equal() to a FileRef — so already-imported files were
        // silently re-imported. Remove the FileRef itself instead.
        List<FileRef> files = MetadataTableUtil.getBulkFilesLoaded(conn, extent, tid);
        for (FileRef file : files)
          if (paths.keySet().remove(file))
            log.debug("Ignoring request to re-import a file already imported: " + extent + ": " + file);
        
        if (paths.size() > 0) {
          long bulkTime = Long.MIN_VALUE;
          if (setTime) {
            // assign each file a monotonically increasing logical time
            for (DataFileValue dfv : paths.values()) {
              long nextTime = tabletTime.getAndUpdateTime();
              if (nextTime < bulkTime)
                throw new IllegalStateException("Time went backwards unexpectedly " + nextTime + " " + bulkTime);
              bulkTime = nextTime;
              dfv.setTime(bulkTime);
            }
          }
          
          synchronized (timeLock) {
            if (bulkTime > persistedTime)
              persistedTime = bulkTime;
            
            MetadataTableUtil.updateTabletDataFile(tid, extent, paths, tabletTime.getMetadataValue(persistedTime), creds,
                tabletServer.getLock());
          }
        }
      }
      
      // metadata is durable; now expose the files to this tablet's scans
      synchronized (Tablet.this) {
        for (Entry<FileRef,DataFileValue> tpath : paths.entrySet()) {
          if (datafileSizes.containsKey(tpath.getKey())) {
            log.error("Adding file that is already in set " + tpath.getKey());
          }
          datafileSizes.put(tpath.getKey(), tpath.getValue());
        }
        
        tabletResources.importedMapFiles();
        
        computeNumEntries();
      }
      
      for (FileRef tpath : paths.keySet()) {
        log.log(TLevel.TABLET_HIST, extent + " import " + tpath + " " + paths.get(tpath));
      }
    }
    
    /**
     * Picks the smallest data file (not in use by a major compaction) to merge into the next minor compaction when the tablet is at its max file count.
     * Returns null when no merge is needed or possible. The chosen file stays reserved until unreserveMergingMinorCompactionFile is called.
     */
    FileRef reserveMergingMinorCompactionFile() {
      if (mergingMinorCompactionFile != null)
        throw new IllegalStateException("Tried to reserve merging minor compaction file when already reserved : " + mergingMinorCompactionFile);
      
      if (extent.isRootTablet())
        return null;
      
      int maxFiles = acuTableConf.getMaxFilesPerTablet();
      
      // when a major compaction is running and we are at max files, write out
      // one extra file... want to avoid the case where major compaction is
      // compacting everything except for the largest file, and therefore the
      // largest file is returned for merging.. the following check mostly
      // avoids this case, except for the case where major compactions fail or
      // are canceled
      if (majorCompactingFiles.size() > 0 && datafileSizes.size() == maxFiles)
        return null;
      
      if (datafileSizes.size() >= maxFiles) {
        // find the smallest file
        long min = Long.MAX_VALUE;
        FileRef minName = null;
        
        for (Entry<FileRef,DataFileValue> entry : datafileSizes.entrySet()) {
          if (entry.getValue().getSize() < min && !majorCompactingFiles.contains(entry.getKey())) {
            min = entry.getValue().getSize();
            minName = entry.getKey();
          }
        }
        
        if (minName == null)
          return null;
        
        mergingMinorCompactionFile = minName;
        return minName;
      }
      
      return null;
    }
    
    // Releases the merge-file reservation; file must match the current
    // reservation exactly (both null, or equal).
    void unreserveMergingMinorCompactionFile(FileRef file) {
      if ((file == null && mergingMinorCompactionFile != null) || (file != null && mergingMinorCompactionFile == null)
          || (file != null && mergingMinorCompactionFile != null && !file.equals(mergingMinorCompactionFile)))
        throw new IllegalStateException("Disagreement " + file + " " + mergingMinorCompactionFile);
      
      mergingMinorCompactionFile = null;
    }
    
    /**
     * Makes the results of a minor compaction durable and visible: renames the tmp file into place, updates the metadata table and write-ahead log state, then
     * swaps the new file into the in-memory file set. absMergeFile is the data file that was merged into this compaction, or null.
     */
    void bringMinorCompactionOnline(FileRef tmpDatafile, FileRef newDatafile, FileRef absMergeFile, DataFileValue dfv, CommitSession commitSession, long flushId)
        throws IOException {
      
      IZooReaderWriter zoo = ZooReaderWriter.getRetryingInstance();
      if (extent.isRootTablet()) {
        try {
          if (!zoo.isLockHeld(tabletServer.getLock().getLockID())) {
            throw new IllegalStateException();
          }
        } catch (Exception e) {
          // BUG FIX: message previously said "major compaction" — this is the
          // minor compaction path.
          throw new IllegalStateException("Can not bring minor compaction online, lock not held", e);
        }
      }
      
      // rename before putting in metadata table, so files in metadata table should
      // always exist
      do {
        try {
          if (dfv.getNumEntries() == 0) {
            fs.deleteRecursively(tmpDatafile.path());
          } else {
            if (fs.exists(newDatafile.path())) {
              log.warn("Target map file already exist " + newDatafile);
              fs.deleteRecursively(newDatafile.path());
            }
            
            if (!fs.rename(tmpDatafile.path(), newDatafile.path())) {
              throw new IOException("rename fails");
            }
          }
          break;
        } catch (IOException
ioe) {
          // keep retrying forever — losing the rename would orphan the minor
          // compaction output
          log.warn("Tablet " + extent + " failed to rename " + newDatafile + " after MinC, will retry in 60 secs...", ioe);
          UtilWaitThread.sleep(60 * 1000);
        }
      } while (true);
      
      long t1, t2;
      
      // the code below always assumes merged files are in use by scans... this must be done
      // because the in memory list of files is not updated until after the metadata table
      // therefore the file is available to scans until memory is updated, but want to ensure
      // the file is not available for garbage collection... if memory were updated
      // before this point (like major compactions do), then the following code could wait
      // for scans to finish like major compactions do.... used to wait for scans to finish
      // here, but that was incorrect because a scan could start after waiting but before
      // memory was updated... assuming the file is always in use by scans leads to
      // one uneeded metadata update when it was not actually in use
      Set<FileRef> filesInUseByScans = Collections.emptySet();
      if (absMergeFile != null)
        filesInUseByScans = Collections.singleton(absMergeFile);
      
      // very important to write delete entries outside of log lock, because
      // this !METADATA write does not go up... it goes sideways or to itself
      if (absMergeFile != null)
        MetadataTableUtil.addDeleteEntries(extent, Collections.singleton(absMergeFile), SystemCredentials.get());
      
      Set<String> unusedWalLogs = beginClearingUnusedLogs();
      try {
        // the order of writing to !METADATA and walog is important in the face of machine/process failures
        // need to write to !METADATA before writing to walog, when things are done in the reverse order
        // data could be lost... the minor compaction start even should be written before the following metadata
        // write is made
        synchronized (timeLock) {
          if (commitSession.getMaxCommittedTime() > persistedTime)
            persistedTime = commitSession.getMaxCommittedTime();
          
          String time = tabletTime.getMetadataValue(persistedTime);
          MetadataTableUtil.updateTabletDataFile(extent, newDatafile, absMergeFile, dfv, time, SystemCredentials.get(), filesInUseByScans,
              tabletServer.getClientAddressString(), tabletServer.getLock(), unusedWalLogs, lastLocation, flushId);
        }
        
      } finally {
        finishClearingUnusedLogs();
      }
      
      do {
        try {
          // the purpose of making this update use the new commit session, instead of the old one passed in,
          // is because the new one will reference the logs used by current memory...
          
          tabletServer.minorCompactionFinished(tabletMemory.getCommitSession(), newDatafile.toString(), commitSession.getWALogSeq() + 2);
          break;
        } catch (IOException e) {
          log.error("Failed to write to write-ahead log " + e.getMessage() + " will retry", e);
          UtilWaitThread.sleep(1 * 1000);
        }
      } while (true);
      
      // swap the new file in under the Tablet monitor so scans see a
      // consistent view of files + memory
      synchronized (Tablet.this) {
        lastLocation = null;
        
        t1 = System.currentTimeMillis();
        if (datafileSizes.containsKey(newDatafile)) {
          log.error("Adding file that is already in set " + newDatafile);
        }
        
        if (dfv.getNumEntries() > 0) {
          datafileSizes.put(newDatafile, dfv);
        }
        
        if (absMergeFile != null) {
          datafileSizes.remove(absMergeFile);
        }
        
        unreserveMergingMinorCompactionFile(absMergeFile);
        
        dataSourceDeletions.incrementAndGet();
        tabletMemory.finishedMinC();
        
        lastFlushID = flushId;
        
        computeNumEntries();
        t2 = System.currentTimeMillis();
      }
      
      // must do this after list of files in memory is updated above
      removeFilesAfterScan(filesInUseByScans);
      
      if (absMergeFile != null)
        log.log(TLevel.TABLET_HIST, extent + " MinC [" + absMergeFile + ",memory] -> " + newDatafile);
      else
        log.log(TLevel.TABLET_HIST, extent + " MinC [memory] -> " + newDatafile);
      log.debug(String.format("MinC finish lock %.2f secs %s", (t2 - t1) / 1000.0, getExtent().toString()));
      if
(dfv.getSize() > acuTableConf.getMemoryInBytes(Property.TABLE_SPLIT_THRESHOLD)) {
        log.debug(String.format("Minor Compaction wrote out file larger than split threshold. split threshold = %,d file size = %,d",
            acuTableConf.getMemoryInBytes(Property.TABLE_SPLIT_THRESHOLD), dfv.getSize()));
      }
      
    }
    
    // Pins the given files for a major compaction; must not overlap the file
    // reserved by a merging minor compaction.
    public void reserveMajorCompactingFiles(Collection<FileRef> files) {
      if (majorCompactingFiles.size() != 0)
        throw new IllegalStateException("Major compacting files not empty " + majorCompactingFiles);
      
      if (mergingMinorCompactionFile != null && files.contains(mergingMinorCompactionFile))
        throw new IllegalStateException("Major compaction tried to resrve file in use by minor compaction " + mergingMinorCompactionFile);
      
      majorCompactingFiles.addAll(files);
    }
    
    public void clearMajorCompactingFile() {
      majorCompactingFiles.clear();
    }
    
    /**
     * Atomically replaces a set of old data files with the output of a major compaction: renames the tmp file into place, updates the in-memory file set under
     * the Tablet monitor, then updates the metadata table. The root tablet takes a different path because its files are not tracked in a metadata table row it
     * can write to itself.
     */
    void bringMajorCompactionOnline(Set<FileRef> oldDatafiles, FileRef tmpDatafile, FileRef newDatafile, Long compactionId, DataFileValue dfv)
        throws IOException {
      long t1, t2;
      
      if (!extent.isRootTablet()) {
        
        if (fs.exists(newDatafile.path())) {
          log.error("Target map file already exist " + newDatafile, new Exception());
          throw new IllegalStateException("Target map file already exist " + newDatafile);
        }
        
        // rename before putting in metadata table, so files in metadata table should
        // always exist
        if (!fs.rename(tmpDatafile.path(), newDatafile.path()))
          log.warn("Rename of " + tmpDatafile + " to " + newDatafile + " returned false");
        
        if (dfv.getNumEntries() == 0) {
          fs.deleteRecursively(newDatafile.path());
        }
      }
      
      TServerInstance lastLocation = null;
      synchronized (Tablet.this) {
        
        t1 = System.currentTimeMillis();
        
        IZooReaderWriter zoo = ZooReaderWriter.getRetryingInstance();
        
        dataSourceDeletions.incrementAndGet();
        
        if (extent.isRootTablet()) {
          
          // root tablet: no scans may reference the old files while they are
          // renamed away, so block until all scans finish
          waitForScansToFinish(oldDatafiles, true, Long.MAX_VALUE);
          
          try {
            if (!zoo.isLockHeld(tabletServer.getLock().getLockID())) {
              throw new IllegalStateException();
            }
          } catch (Exception e) {
            throw new IllegalStateException("Can not bring major compaction online, lock not held", e);
          }
          
          // mark files as ready for deletion, but
          // do not delete them until we successfully
          // rename the compacted map file, in case
          // the system goes down
          
          String compactName = newDatafile.path().getName();
          
          for (FileRef ref : oldDatafiles) {
            Path path = ref.path();
            fs.rename(path, new Path(location + "/delete+" + compactName + "+" + path.getName()));
          }
          
          if (fs.exists(newDatafile.path())) {
            log.error("Target map file already exist " + newDatafile, new Exception());
            throw new IllegalStateException("Target map file already exist " + newDatafile);
          }
          
          if (!fs.rename(tmpDatafile.path(), newDatafile.path()))
            log.warn("Rename of " + tmpDatafile + " to " + newDatafile + " returned false");
          
          // start deleting files, if we do not finish they will be cleaned
          // up later
          for (FileRef ref : oldDatafiles) {
            Path path = ref.path();
            Path deleteFile = new Path(location + "/delete+" + compactName + "+" + path.getName());
            if (acuTableConf.getBoolean(Property.GC_TRASH_IGNORE) || !fs.moveToTrash(deleteFile))
              fs.deleteRecursively(deleteFile);
          }
        }
        
        // atomically remove old files and add new file
        for (FileRef oldDatafile : oldDatafiles) {
          if (!datafileSizes.containsKey(oldDatafile)) {
            log.error("file does not exist in set " + oldDatafile);
          }
          datafileSizes.remove(oldDatafile);
          majorCompactingFiles.remove(oldDatafile);
        }
        
        if (datafileSizes.containsKey(newDatafile)) {
          log.error("Adding file that is already in set " + newDatafile);
        }
        
        if (dfv.getNumEntries() > 0) {
          datafileSizes.put(newDatafile, dfv);
        }
        
        // could be used by a follow on compaction in a multipass compaction
        majorCompactingFiles.add(newDatafile);
        
        computeNumEntries();
        
        lastLocation = Tablet.this.lastLocation;
        Tablet.this.lastLocation = null;
        
        if (compactionId != null)
          lastCompactID = compactionId;
        
        t2 = System.currentTimeMillis();
      }
      
      if (!extent.isRootTablet()) {
        // non-root: wait briefly for scans; any stragglers get scan refs
        // recorded in the metadata table so GC does not remove their files
        Set<FileRef> filesInUseByScans = waitForScansToFinish(oldDatafiles, false, 10000);
        if (filesInUseByScans.size() > 0)
          log.debug("Adding scan refs to metadata " + extent + " " + filesInUseByScans);
        MetadataTableUtil.replaceDatafiles(extent, oldDatafiles, filesInUseByScans, newDatafile, compactionId, dfv, SystemCredentials.get(),
            tabletServer.getClientAddressString(), lastLocation, tabletServer.getLock());
        removeFilesAfterScan(filesInUseByScans);
      }
      
      log.debug(String.format("MajC finish lock %.2f secs", (t2 - t1) / 1000.0));
      log.log(TLevel.TABLET_HIST, extent + " MajC " + oldDatafiles + " --> " + newDatafile);
    }
    
    // Snapshot of file -> size, taken under the Tablet monitor.
    public SortedMap<FileRef,DataFileValue> getDatafileSizes() {
      synchronized (Tablet.this) {
        TreeMap<FileRef,DataFileValue> copy = new TreeMap<FileRef,DataFileValue>(datafileSizes);
        return Collections.unmodifiableSortedMap(copy);
      }
    }
    
    // Snapshot of the current file set, taken under the Tablet monitor.
    public Set<FileRef> getFiles() {
      synchronized (Tablet.this) {
        HashSet<FileRef> files = new HashSet<FileRef>(datafileSizes.keySet());
        return Collections.unmodifiableSet(files);
      }
    }
    
  }
  
  public Tablet(TabletServer tabletServer, Text location, KeyExtent extent, TabletResourceManager trm, SortedMap<Key,Value> tabletsKeyValues)
      throws IOException {
    this(tabletServer, location, extent, trm, CachedConfiguration.getInstance(), tabletsKeyValues);
    splitCreationTime = 0;
  }
  
  public Tablet(TabletServer tabletServer, Text location, KeyExtent extent, TabletResourceManager trm, SortedMap<FileRef,DataFileValue> datafiles, String time,
      long initFlushID, long initCompactID) throws IOException {
    this(tabletServer, location, extent, trm, CachedConfiguration.getInstance(), datafiles, time, initFlushID, initCompactID);
    splitCreationTime = System.currentTimeMillis();
  }
  
  private Tablet(TabletServer tabletServer, Text location, KeyExtent extent, TabletResourceManager trm, Configuration conf,
      SortedMap<Key,Value> tabletsKeyValues) throws IOException {
    this(tabletServer, location, extent, trm, conf, VolumeManagerImpl.get(), tabletsKeyValues);
  }
  
  static private final List<LogEntry> EMPTY = Collections.emptyList();
  
  private Tablet(TabletServer tabletServer, Text location, KeyExtent extent, TabletResourceManager
trm, Configuration conf,
      SortedMap<FileRef,DataFileValue> datafiles, String time, long initFlushID, long initCompactID) throws IOException {
    this(tabletServer, location, extent, trm, conf, VolumeManagerImpl.get(), EMPTY, datafiles, time, null, new HashSet<FileRef>(), initFlushID, initCompactID);
  }
  
  /**
   * Extracts this tablet's persisted time value from the given metadata key/values. Returns null for the root tablet or when exactly one time entry is not
   * found.
   */
  private static String lookupTime(AccumuloConfiguration conf, KeyExtent extent, SortedMap<Key,Value> tabletsKeyValues) {
    SortedMap<Key,Value> entries;
    
    if (extent.isRootTablet()) {
      return null;
    } else {
      entries = new TreeMap<Key,Value>();
      Text rowName = extent.getMetadataEntry();
      for (Entry<Key,Value> entry : tabletsKeyValues.entrySet()) {
        if (entry.getKey().compareRow(rowName) == 0 && TabletsSection.ServerColumnFamily.TIME_COLUMN.hasColumns(entry.getKey())) {
          entries.put(new Key(entry.getKey()), new Value(entry.getValue()));
        }
      }
    }
    
    // log.debug("extent : "+extent+" entries : "+entries);
    
    if (entries.size() == 1)
      return entries.values().iterator().next().toString();
    return null;
  }
  
  /**
   * Builds the map of data files for a tablet. The root tablet's files come straight from a directory listing (it has no metadata row to read); all other
   * tablets scan their row of the appropriate metadata table.
   */
  private static SortedMap<FileRef,DataFileValue> lookupDatafiles(AccumuloConfiguration conf, VolumeManager fs, KeyExtent extent,
      SortedMap<Key,Value> tabletsKeyValues) throws IOException {
    
    TreeMap<FileRef,DataFileValue> datafiles = new TreeMap<FileRef,DataFileValue>();
    
    if (extent.isRootTablet()) { // the meta0 tablet
      Path location = new Path(ServerConstants.getRootTabletDir());
      location = location.makeQualified(fs.getDefaultVolume());
      // cleanUpFiles() has special handling for delete. files
      FileStatus[] files = fs.listStatus(location);
      Collection<String> goodPaths = cleanUpFiles(fs, files, true);
      for (String good : goodPaths) {
        Path path = new Path(good);
        String filename = path.getName();
        FileRef ref = new FileRef(location.toString() + "/" + filename, path);
        // sizes for root tablet files are not tracked; use a placeholder
        DataFileValue dfv = new DataFileValue(0, 0);
        datafiles.put(ref, dfv);
      }
    } else {
      Text rowName = extent.getMetadataEntry();
      
      String tableId = extent.isMeta() ? RootTable.ID : MetadataTable.ID;
      ScannerImpl mdScanner = new ScannerImpl(HdfsZooInstance.getInstance(), SystemCredentials.get(), tableId, Authorizations.EMPTY);
      
      // Commented out because when no data file is present, each tablet will scan through metadata table and return nothing
      // reduced batch size to improve performance
      // changed here after endKeys were implemented from 10 to 1000
      mdScanner.setBatchSize(1000);
      
      // leave these in, again, now using endKey for safety
      mdScanner.fetchColumnFamily(DataFileColumnFamily.NAME);
      
      mdScanner.setRange(new Range(rowName));
      
      for (Entry<Key,Value> entry : mdScanner) {
        
        if (entry.getKey().compareRow(rowName) != 0) {
          break;
        }
        
        FileRef ref = new FileRef(entry.getKey().getColumnQualifier().toString(), fs.getFullPath(entry.getKey()));
        datafiles.put(ref, new DataFileValue(entry.getValue().get()));
      }
    }
    return datafiles;
  }
  
  /**
   * Collects the write-ahead-log entries for a tablet: metadata tablets read them via MetadataTableUtil, other tablets filter them out of the supplied
   * key/values for their metadata row.
   */
  private static List<LogEntry> lookupLogEntries(KeyExtent ke, SortedMap<Key,Value> tabletsKeyValues) {
    List<LogEntry> logEntries = new ArrayList<LogEntry>();
    
    if (ke.isMeta()) {
      try {
        logEntries = MetadataTableUtil.getLogEntries(SystemCredentials.get(), ke);
      } catch (Exception ex) {
        throw new RuntimeException("Unable to read tablet log entries", ex);
      }
    } else {
      log.debug("Looking at metadata " + tabletsKeyValues);
      Text row = ke.getMetadataEntry();
      for (Entry<Key,Value> entry : tabletsKeyValues.entrySet()) {
        Key key = entry.getKey();
        if (key.getRow().equals(row)) {
          if (key.getColumnFamily().equals(LogColumnFamily.NAME)) {
            logEntries.add(MetadataTableUtil.entryFromKeyValue(key, entry.getValue()));
          }
        }
      }
    }
    
    log.debug("got " + logEntries + " for logs for " + ke);
    return logEntries;
  }
  
  // Collects files pinned by scan references from this tablet's metadata row.
  // (Continues past the end of this view.)
  private static Set<FileRef> lookupScanFiles(KeyExtent extent, SortedMap<Key,Value> tabletsKeyValues, VolumeManager fs) throws IOException {
    HashSet<FileRef> scanFiles = new HashSet<FileRef>();
    
    Text row = extent.getMetadataEntry();
    for (Entry<Key,Value> entry : tabletsKeyValues.entrySet()) {
      Key key = entry.getKey();
      if (key.getRow().equals(row) &&
key.getColumnFamily().equals(ScanFileColumnFamily.NAME)) { String meta = key.getColumnQualifier().toString(); Path path = fs.getFullPath(extent.getTableId().toString(), meta); scanFiles.add(new FileRef(meta, path)); } } return scanFiles; } private static long lookupFlushID(KeyExtent extent, SortedMap<Key,Value> tabletsKeyValues) { Text row = extent.getMetadataEntry(); for (Entry<Key,Value> entry : tabletsKeyValues.entrySet()) { Key key = entry.getKey(); if (key.getRow().equals(row) && TabletsSection.ServerColumnFamily.FLUSH_COLUMN.equals(key.getColumnFamily(), key.getColumnQualifier())) return Long.parseLong(entry.getValue().toString()); } return -1; } private static long lookupCompactID(KeyExtent extent, SortedMap<Key,Value> tabletsKeyValues) { Text row = extent.getMetadataEntry(); for (Entry<Key,Value> entry : tabletsKeyValues.entrySet()) { Key key = entry.getKey(); if (key.getRow().equals(row) && TabletsSection.ServerColumnFamily.COMPACT_COLUMN.equals(key.getColumnFamily(), key.getColumnQualifier())) return Long.parseLong(entry.getValue().toString()); } return -1; } private Tablet(TabletServer tabletServer, Text location, KeyExtent extent, TabletResourceManager trm, Configuration conf, VolumeManager fs, SortedMap<Key,Value> tabletsKeyValues) throws IOException { this(tabletServer, location, extent, trm, conf, fs, lookupLogEntries(extent, tabletsKeyValues), lookupDatafiles(tabletServer.getSystemConfiguration(), fs, extent, tabletsKeyValues), lookupTime(tabletServer.getSystemConfiguration(), extent, tabletsKeyValues), lookupLastServer(extent, tabletsKeyValues), lookupScanFiles(extent, tabletsKeyValues, fs), lookupFlushID(extent, tabletsKeyValues), lookupCompactID(extent, tabletsKeyValues)); } private static TServerInstance lookupLastServer(KeyExtent extent, SortedMap<Key,Value> tabletsKeyValues) { for (Entry<Key,Value> entry : tabletsKeyValues.entrySet()) { if (entry.getKey().getColumnFamily().compareTo(TabletsSection.LastLocationColumnFamily.NAME) == 0) { return 
  /**
   * yet another constructor - this one allows us to avoid costly lookups into the Metadata table if we already know the files we need - as at split time
   *
   * Master constructor: every other Tablet constructor funnels here. Resolves the
   * tablet directory, initializes time/flush/compact state, registers a per-table
   * configuration observer, replays any write-ahead logs, then wires up the
   * datafile manager last (since that can trigger a major compaction).
   */
  private Tablet(final TabletServer tabletServer, final Text location, final KeyExtent extent, final TabletResourceManager trm, final Configuration conf,
      final VolumeManager fs, final List<LogEntry> logEntries, final SortedMap<FileRef,DataFileValue> datafiles, String time,
      final TServerInstance lastLocation, Set<FileRef> scanFiles, long initFlushID, long initCompactID) throws IOException {
    Path locationPath;
    // a ":" in the location means it is already a fully-qualified URI; otherwise
    // it is relative to the table's directory on the volume
    if (location.find(":") >= 0) {
      locationPath = new Path(location.toString());
    } else {
      locationPath = fs.getFullPath(FileType.TABLE, extent.getTableId().toString() + location.toString());
    }
    this.location = locationPath.makeQualified(fs.getFileSystemByPath(locationPath));
    this.lastLocation = lastLocation;
    this.tabletDirectory = location.toString();
    this.conf = conf;
    this.acuTableConf = tabletServer.getTableConfiguration(extent);
    this.fs = fs;
    this.extent = extent;
    this.tabletResources = trm;
    this.lastFlushID = initFlushID;
    this.lastCompactID = initCompactID;
    if (extent.isRootTablet()) {
      // the root tablet stores no time in metadata; recover it by scanning its
      // files for the maximum timestamp seen
      long rtime = Long.MIN_VALUE;
      for (FileRef ref : datafiles.keySet()) {
        Path path = ref.path();
        FileSystem ns = fs.getFileSystemByPath(path);
        FileSKVIterator reader = FileOperations.getInstance().openReader(path.toString(), true, ns, ns.getConf(),
            tabletServer.getTableConfiguration(extent));
        long maxTime = -1;
        try {
          while (reader.hasTop()) {
            maxTime = Math.max(maxTime, reader.getTopKey().getTimestamp());
            reader.next();
          }
        } finally {
          reader.close();
        }
        if (maxTime > rtime) {
          time = TabletTime.LOGICAL_TIME_ID + "" + maxTime;
          rtime = maxTime;
        }
      }
    }
    if (time == null && datafiles.isEmpty() && extent.equals(RootTable.OLD_EXTENT)) {
      // recovery... old root tablet has no data, so time doesn't matter:
      time = TabletTime.LOGICAL_TIME_ID + "" + Long.MIN_VALUE;
    }
    this.tabletServer = tabletServer;
    this.logId = tabletServer.createLogId(extent);
    this.timer = new TabletStatsKeeper();
    setupDefaultSecurityLabels(extent);
    tabletMemory = new TabletMemory();
    tabletTime = TabletTime.getInstance(time);
    persistedTime = tabletTime.getTime();
    acuTableConf.addObserver(configObserver = new ConfigurationObserver() {

      private void reloadConstraints() {
        constraintChecker.set(new ConstraintChecker(getTableConfiguration()));
      }

      @Override
      public void propertiesChanged() {
        reloadConstraints();
        try {
          setupDefaultSecurityLabels(extent);
        } catch (Exception e) {
          log.error("Failed to reload default security labels for extent: " + extent.toString());
        }
      }

      @Override
      public void propertyChanged(String prop) {
        if (prop.startsWith(Property.TABLE_CONSTRAINT_PREFIX.getKey()))
          reloadConstraints();
        else if (prop.equals(Property.TABLE_DEFAULT_SCANTIME_VISIBILITY.getKey())) {
          try {
            log.info("Default security labels changed for extent: " + extent.toString());
            setupDefaultSecurityLabels(extent);
          } catch (Exception e) {
            log.error("Failed to reload default security labels for extent: " + extent.toString());
          }
        }
      }

      @Override
      public void sessionExpired() {
        log.debug("Session expired, no longer updating per table props...");
      }
    });
    // Force a load of any per-table properties
    configObserver.propertiesChanged();
    tabletResources.setTablet(this, acuTableConf);
    if (!logEntries.isEmpty()) {
      log.info("Starting Write-Ahead Log recovery for " + this.extent);
      // count[0] = mutations applied, count[1] = max system-set timestamp seen
      final long[] count = new long[2];
      final CommitSession commitSession = tabletMemory.getCommitSession();
      count[1] = Long.MIN_VALUE;
      try {
        Set<String> absPaths = new HashSet<String>();
        for (FileRef ref : datafiles.keySet())
          absPaths.add(ref.path().toString());
        tabletServer.recover(this.tabletServer.getFileSystem(), this, logEntries, absPaths, new MutationReceiver() {
          @Override
          public void receive(Mutation m) {
            // LogReader.printMutation(m);
            Collection<ColumnUpdate> muts = m.getUpdates();
            for (ColumnUpdate columnUpdate : muts) {
              if (!columnUpdate.hasTimestamp()) {
                // if it is not a user set timestamp, it must have been set
                // by the system
                count[1] = Math.max(count[1], columnUpdate.getTimestamp());
              }
            }
            tabletMemory.mutate(commitSession, Collections.singletonList(m));
            count[0]++;
          }
        });
        if (count[1] != Long.MIN_VALUE) {
          tabletTime.useMaxTimeFromWALog(count[1]);
        }
        commitSession.updateMaxCommittedTime(tabletTime.getTime());
        tabletMemory.updateMemoryUsageStats();
        if (count[0] == 0) {
          // nothing was replayed, so the WAL references in metadata are stale
          MetadataTableUtil.removeUnusedWALEntries(extent, logEntries, tabletServer.getLock());
          logEntries.clear();
        }
      } catch (Throwable t) {
        if (acuTableConf.getBoolean(Property.TABLE_FAILURES_IGNORE)) {
          log.warn("Error recovering from log files: ", t);
        } else {
          throw new RuntimeException(t);
        }
      }
      // make some closed references that represent the recovered logs
      currentLogs = new HashSet<DfsLogger>();
      for (LogEntry logEntry : logEntries) {
        for (String log : logEntry.logSet) {
          String[] parts = log.split("/", 2);
          Path file = fs.getFullPath(FileType.WAL, parts[1]);
          currentLogs.add(new DfsLogger(tabletServer.getServerConfig(), logEntry.server, file));
        }
      }
      log.info("Write-Ahead Log recovery complete for " + this.extent + " (" + count[0] + " mutations applied, " + tabletMemory.getNumEntries()
          + " entries created)");
    }
    String contextName = acuTableConf.get(Property.TABLE_CLASSPATH);
    if (contextName != null && !contextName.equals("")) {
      // initialize context classloader, instead of possibly waiting for it to initialize for a scan
      // TODO this could hang, causing other tablets to fail to load - ACCUMULO-1292
      AccumuloVFSClassLoader.getContextManager().getClassLoader(contextName);
    }
    // do this last after tablet is completely setup because it
    // could cause major compaction to start
    datafileManager = new DatafileManager(datafiles);
    computeNumEntries();
    datafileManager.removeFilesAfterScan(scanFiles);
    // look for hints of a failure on the previous tablet server
    if (!logEntries.isEmpty() || needsMajorCompaction(MajorCompactionReason.NORMAL)) {
      // look for any temp files hanging around
      removeOldTemporaryFiles();
    }
    log.log(TLevel.TABLET_HIST, extent + " opened");
  }
failure on the previous tablet server if (!logEntries.isEmpty() || needsMajorCompaction(MajorCompactionReason.NORMAL)) { // look for any temp files hanging around removeOldTemporaryFiles(); } log.log(TLevel.TABLET_HIST, extent + " opened"); } private void removeOldTemporaryFiles() { // remove any temporary files created by a previous tablet server try { for (FileStatus tmp : fs.globStatus(new Path(location, "*_tmp"))){ try { log.debug("Removing old temp file " + tmp.getPath()); fs.delete(tmp.getPath()); } catch (IOException ex) { log.error("Unable to remove old temp file " + tmp.getPath() + ": " + ex); } } } catch (IOException ex) { log.error("Error scanning for old temp files in " + location); } } private void setupDefaultSecurityLabels(KeyExtent extent) { if (extent.isMeta()) { defaultSecurityLabel = new byte[0]; } else { try { ColumnVisibility cv = new ColumnVisibility(acuTableConf.get(Property.TABLE_DEFAULT_SCANTIME_VISIBILITY)); this.defaultSecurityLabel = cv.getExpression(); } catch (Exception e) { log.error(e, e); this.defaultSecurityLabel = new byte[0]; } } } private static Collection<String> cleanUpFiles(VolumeManager fs, FileStatus[] files, boolean deleteTmp) throws IOException { /* * called in constructor and before major compactions */ Collection<String> goodFiles = new ArrayList<String>(files.length); for (FileStatus file : files) { String path = file.getPath().toString(); String filename = file.getPath().getName(); // check for incomplete major compaction, this should only occur // for root tablet if (filename.startsWith("delete+")) { String expectedCompactedFile = path.substring(0, path.lastIndexOf("/delete+")) + "/" + filename.split("\\+")[1]; if (fs.exists(new Path(expectedCompactedFile))) { // compaction finished, but did not finish deleting compacted files.. 
so delete it if (!fs.deleteRecursively(file.getPath())) log.warn("Delete of file: " + file.getPath().toString() + " return false"); continue; } // compaction did not finish, so put files back // reset path and filename for rest of loop filename = filename.split("\\+", 3)[2]; path = path.substring(0, path.lastIndexOf("/delete+")) + "/" + filename; if (!fs.rename(file.getPath(), new Path(path))) log.warn("Rename of " + file.getPath().toString() + " to " + path + " returned false"); } if (filename.endsWith("_tmp")) { if (deleteTmp) { log.warn("cleaning up old tmp file: " + path); if (!fs.deleteRecursively(file.getPath())) log.warn("Delete of tmp file: " + file.getPath().toString() + " return false"); } continue; } if (!filename.startsWith(Constants.MAPFILE_EXTENSION + "_") && !FileOperations.getValidExtensions().contains(filename.split("\\.")[1])) { log.error("unknown file in tablet" + path); continue; } goodFiles.add(path); } return goodFiles; } public static class KVEntry extends KeyValue { public KVEntry(Key k, Value v) { super(new Key(k), Arrays.copyOf(v.get(), v.get().length)); } @Override public String toString() { return key.toString() + "=" + getValue(); } int numBytes() { return key.getSize() + getValue().get().length; } int estimateMemoryUsed() { return key.getSize() + getValue().get().length + (9 * 32); // overhead is 32 per object } } private LookupResult lookup(SortedKeyValueIterator<Key,Value> mmfi, List<Range> ranges, HashSet<Column> columnSet, ArrayList<KVEntry> results, long maxResultsSize) throws IOException { LookupResult lookupResult = new LookupResult(); boolean exceededMemoryUsage = false; boolean tabletClosed = false; Set<ByteSequence> cfset = null; if (columnSet.size() > 0) cfset = LocalityGroupUtil.families(columnSet); for (Range range : ranges) { if (exceededMemoryUsage || tabletClosed) { lookupResult.unfinishedRanges.add(range); continue; } int entriesAdded = 0; try { if (cfset != null) mmfi.seek(range, cfset, true); else mmfi.seek(range, 
LocalityGroupUtil.EMPTY_CF_SET, false); while (mmfi.hasTop()) { Key key = mmfi.getTopKey(); KVEntry kve = new KVEntry(key, mmfi.getTopValue()); results.add(kve); entriesAdded++; lookupResult.bytesAdded += kve.estimateMemoryUsed(); lookupResult.dataSize += kve.numBytes(); exceededMemoryUsage = lookupResult.bytesAdded > maxResultsSize; if (exceededMemoryUsage) { addUnfinishedRange(lookupResult, range, key, false); break; } mmfi.next(); } } catch (TooManyFilesException tmfe) { // treat this as a closed tablet, and let the client retry log.warn("Tablet " + getExtent() + " has too many files, batch lookup can not run"); handleTabletClosedDuringScan(results, lookupResult, exceededMemoryUsage, range, entriesAdded); tabletClosed = true; } catch (IOException ioe) { if (shutdownInProgress()) { // assume HDFS shutdown hook caused this exception log.debug("IOException while shutdown in progress ", ioe); handleTabletClosedDuringScan(results, lookupResult, exceededMemoryUsage, range, entriesAdded); tabletClosed = true; } else { throw ioe; } } catch (IterationInterruptedException iie) { if (isClosed()) { handleTabletClosedDuringScan(results, lookupResult, exceededMemoryUsage, range, entriesAdded); tabletClosed = true; } else { throw iie; } } catch (TabletClosedException tce) { handleTabletClosedDuringScan(results, lookupResult, exceededMemoryUsage, range, entriesAdded); tabletClosed = true; } } return lookupResult; } private void handleTabletClosedDuringScan(ArrayList<KVEntry> results, LookupResult lookupResult, boolean exceededMemoryUsage, Range range, int entriesAdded) { if (exceededMemoryUsage) throw new IllegalStateException("tablet should not exceed memory usage or close, not both"); if (entriesAdded > 0) addUnfinishedRange(lookupResult, range, results.get(results.size() - 1).key, false); else lookupResult.unfinishedRanges.add(range); lookupResult.closed = true; } private void addUnfinishedRange(LookupResult lookupResult, Range range, Key key, boolean inclusiveStartKey) { if 
(range.getEndKey() == null || key.compareTo(range.getEndKey()) < 0) { Range nlur = new Range(new Key(key), inclusiveStartKey, range.getEndKey(), range.isEndKeyInclusive()); lookupResult.unfinishedRanges.add(nlur); } } public static interface KVReceiver { void receive(List<KVEntry> matches) throws IOException; } class LookupResult { List<Range> unfinishedRanges = new ArrayList<Range>(); long bytesAdded = 0; long dataSize = 0; boolean closed = false; } public LookupResult lookup(List<Range> ranges, HashSet<Column> columns, Authorizations authorizations, ArrayList<KVEntry> results, long maxResultSize, List<IterInfo> ssiList, Map<String,Map<String,String>> ssio, AtomicBoolean interruptFlag) throws IOException { if (ranges.size() == 0) { return new LookupResult(); } ranges = Range.mergeOverlapping(ranges); Collections.sort(ranges); Range tabletRange = extent.toDataRange(); for (Range range : ranges) { // do a test to see if this range falls within the tablet, if it does not // then clip will throw an exception tabletRange.clip(range); } ScanDataSource dataSource = new ScanDataSource(authorizations, this.defaultSecurityLabel, columns, ssiList, ssio, interruptFlag); LookupResult result = null; try { SortedKeyValueIterator<Key,Value> iter = new SourceSwitchingIterator(dataSource); result = lookup(iter, ranges, columns, results, maxResultSize); return result; } catch (IOException ioe) { dataSource.close(true); throw ioe; } finally { // code in finally block because always want // to return mapfiles, even when exception is thrown dataSource.close(false); synchronized (this) { queryCount += results.size(); if (result != null) queryBytes += result.dataSize; } } } private Batch nextBatch(SortedKeyValueIterator<Key,Value> iter, Range range, int num, Set<Column> columns) throws IOException { // log.info("In nextBatch.."); List<KVEntry> results = new ArrayList<KVEntry>(); Key key = null; Value value; long resultSize = 0L; long resultBytes = 0L; long maxResultsSize = 
acuTableConf.getMemoryInBytes(Property.TABLE_SCAN_MAXMEM); if (columns.size() == 0) { iter.seek(range, LocalityGroupUtil.EMPTY_CF_SET, false); } else { iter.seek(range, LocalityGroupUtil.families(columns), true); } Key continueKey = null; boolean skipContinueKey = false; boolean endOfTabletReached = false; while (iter.hasTop()) { value = iter.getTopValue(); key = iter.getTopKey(); KVEntry kvEntry = new KVEntry(key, value); // copies key and value results.add(kvEntry); resultSize += kvEntry.estimateMemoryUsed(); resultBytes += kvEntry.numBytes(); if (resultSize >= maxResultsSize || results.size() >= num) { continueKey = new Key(key); skipContinueKey = true; break; } iter.next(); } if (iter.hasTop() == false) { endOfTabletReached = true; } Batch retBatch = new Batch(); retBatch.numBytes = resultBytes; if (!endOfTabletReached) { retBatch.continueKey = continueKey; retBatch.skipContinueKey = skipContinueKey; } else { retBatch.continueKey = null; } if (endOfTabletReached && results.size() == 0) retBatch.results = null; else retBatch.results = results; return retBatch; } /** * Determine if a JVM shutdown is in progress. 
* */ private boolean shutdownInProgress() { try { Runtime.getRuntime().removeShutdownHook(new Thread(new Runnable() { @Override public void run() {} })); } catch (IllegalStateException ise) { return true; } return false; } private class Batch { public boolean skipContinueKey; public List<KVEntry> results; public Key continueKey; public long numBytes; } Scanner createScanner(Range range, int num, Set<Column> columns, Authorizations authorizations, List<IterInfo> ssiList, Map<String,Map<String,String>> ssio, boolean isolated, AtomicBoolean interruptFlag) { // do a test to see if this range falls within the tablet, if it does not // then clip will throw an exception extent.toDataRange().clip(range); ScanOptions opts = new ScanOptions(num, authorizations, this.defaultSecurityLabel, columns, ssiList, ssio, interruptFlag, isolated); return new Scanner(range, opts); } class ScanBatch { boolean more; List<KVEntry> results; ScanBatch(List<KVEntry> results, boolean more) { this.results = results; this.more = more; } } class Scanner { private ScanOptions options; private Range range; private SortedKeyValueIterator<Key,Value> isolatedIter; private ScanDataSource isolatedDataSource; private boolean sawException = false; private boolean scanClosed = false; Scanner(Range range, ScanOptions options) { this.range = range; this.options = options; } synchronized ScanBatch read() throws IOException, TabletClosedException { if (sawException) throw new IllegalStateException("Tried to use scanner after exception occurred."); if (scanClosed) throw new IllegalStateException("Tried to use scanner after it was closed."); Batch results = null; ScanDataSource dataSource; if (options.isolated) { if (isolatedDataSource == null) isolatedDataSource = new ScanDataSource(options); dataSource = isolatedDataSource; } else { dataSource = new ScanDataSource(options); } try { SortedKeyValueIterator<Key,Value> iter; if (options.isolated) { if (isolatedIter == null) isolatedIter = new 
SourceSwitchingIterator(dataSource, true); else isolatedDataSource.fileManager.reattach(); iter = isolatedIter; } else { iter = new SourceSwitchingIterator(dataSource, false); } results = nextBatch(iter, range, options.num, options.columnSet); if (results.results == null) { range = null; return new ScanBatch(new ArrayList<Tablet.KVEntry>(), false); } else if (results.continueKey == null) { return new ScanBatch(results.results, false); } else { range = new Range(results.continueKey, !results.skipContinueKey, range.getEndKey(), range.isEndKeyInclusive()); return new ScanBatch(results.results, true); } } catch (IterationInterruptedException iie) { sawException = true; if (isClosed()) throw new TabletClosedException(iie); else throw iie; } catch (IOException ioe) { if (shutdownInProgress()) { log.debug("IOException while shutdown in progress ", ioe); throw new TabletClosedException(ioe); // assume IOException was caused by execution of HDFS shutdown hook } sawException = true; dataSource.close(true); throw ioe; } catch (RuntimeException re) { sawException = true; throw re; } finally { // code in finally block because always want // to return mapfiles, even when exception is thrown if (!options.isolated) dataSource.close(false); else if (dataSource.fileManager != null) dataSource.fileManager.detach(); synchronized (Tablet.this) { if (results != null && results.results != null) { long more = results.results.size(); queryCount += more; queryBytes += results.numBytes; } } } } // close and read are synchronized because can not call close on the data source while it is in use // this cloud lead to the case where file iterators that are in use by a thread are returned // to the pool... 
this would be bad void close() { options.interruptFlag.set(true); synchronized (this) { scanClosed = true; if (isolatedDataSource != null) isolatedDataSource.close(false); } } } static class ScanOptions { // scan options Authorizations authorizations; byte[] defaultLabels; Set<Column> columnSet; List<IterInfo> ssiList; Map<String,Map<String,String>> ssio; AtomicBoolean interruptFlag; int num; boolean isolated; ScanOptions(int num, Authorizations authorizations, byte[] defaultLabels, Set<Column> columnSet, List<IterInfo> ssiList, Map<String,Map<String,String>> ssio, AtomicBoolean interruptFlag, boolean isolated) { this.num = num; this.authorizations = authorizations; this.defaultLabels = defaultLabels; this.columnSet = columnSet; this.ssiList = ssiList; this.ssio = ssio; this.interruptFlag = interruptFlag; this.isolated = isolated; } } class ScanDataSource implements DataSource { // data source state private ScanFileManager fileManager; private SortedKeyValueIterator<Key,Value> iter; private long expectedDeletionCount; private List<MemoryIterator> memIters = null; private long fileReservationId; private AtomicBoolean interruptFlag; private StatsIterator statsIterator; ScanOptions options; ScanDataSource(Authorizations authorizations, byte[] defaultLabels, HashSet<Column> columnSet, List<IterInfo> ssiList, Map<String,Map<String,String>> ssio, AtomicBoolean interruptFlag) { expectedDeletionCount = dataSourceDeletions.get(); this.options = new ScanOptions(-1, authorizations, defaultLabels, columnSet, ssiList, ssio, interruptFlag, false); this.interruptFlag = interruptFlag; } ScanDataSource(ScanOptions options) { expectedDeletionCount = dataSourceDeletions.get(); this.options = options; this.interruptFlag = options.interruptFlag; } @Override public DataSource getNewDataSource() { if (!isCurrent()) { // log.debug("Switching data sources during a scan"); if (memIters != null) { tabletMemory.returnIterators(memIters); memIters = null; 
datafileManager.returnFilesForScan(fileReservationId); fileReservationId = -1; } if (fileManager != null) fileManager.releaseOpenFiles(false); expectedDeletionCount = dataSourceDeletions.get(); iter = null; return this; } else return this; } @Override public boolean isCurrent() { return expectedDeletionCount == dataSourceDeletions.get(); } @Override public SortedKeyValueIterator<Key,Value> iterator() throws IOException { if (iter == null) iter = createIterator(); return iter; } private SortedKeyValueIterator<Key,Value> createIterator() throws IOException { Map<FileRef,DataFileValue> files; synchronized (Tablet.this) { if (memIters != null) throw new IllegalStateException("Tried to create new scan iterator w/o releasing memory"); if (Tablet.this.closed) throw new TabletClosedException(); if (interruptFlag.get()) throw new IterationInterruptedException(extent.toString() + " " + interruptFlag.hashCode()); // only acquire the file manager when we know the tablet is open if (fileManager == null) { fileManager = tabletResources.newScanFileManager(); activeScans.add(this); } if (fileManager.getNumOpenFiles() != 0) throw new IllegalStateException("Tried to create new scan iterator w/o releasing files"); // set this before trying to get iterators in case // getIterators() throws an exception expectedDeletionCount = dataSourceDeletions.get(); memIters = tabletMemory.getIterators(); Pair<Long,Map<FileRef,DataFileValue>> reservation = datafileManager.reserveFilesForScan(); fileReservationId = reservation.getFirst(); files = reservation.getSecond(); } Collection<InterruptibleIterator> mapfiles = fileManager.openFiles(files, options.isolated); List<SortedKeyValueIterator<Key,Value>> iters = new ArrayList<SortedKeyValueIterator<Key,Value>>(mapfiles.size() + memIters.size()); iters.addAll(mapfiles); iters.addAll(memIters); for (SortedKeyValueIterator<Key,Value> skvi : iters) ((InterruptibleIterator) skvi).setInterruptFlag(interruptFlag); MultiIterator multiIter = new 
  /**
   * Writes an in-memory map out as a new data file (optionally merging a small
   * existing file), brings the result online, and records timing/metrics.
   * Returns the size/entry stats of the file written. Any failure is rethrown
   * as RuntimeException; tablet memory is always finalized in the finally block.
   */
  private DataFileValue minorCompact(Configuration conf, VolumeManager fs, InMemoryMap memTable, FileRef tmpDatafile, FileRef newDatafile, FileRef mergeFile,
      boolean hasQueueTime, long queued, CommitSession commitSession, long flushId, MinorCompactionReason mincReason) {
    boolean failed = false;
    long start = System.currentTimeMillis();
    timer.incrementStatusMinor();

    long count = 0;

    try {
      Span span = Trace.start("write");
      CompactionStats stats;
      try {
        count = memTable.getNumEntries();

        DataFileValue dfv = null;
        if (mergeFile != null)
          dfv = datafileManager.getDatafileSizes().get(mergeFile);

        MinorCompactor compactor = new MinorCompactor(conf, fs, memTable, mergeFile, dfv, tmpDatafile, acuTableConf, extent, mincReason);
        stats = compactor.call();
      } finally {
        span.stop();
      }
      // publish the new file in metadata and swap it into the live file set
      span = Trace.start("bringOnline");
      try {
        datafileManager.bringMinorCompactionOnline(tmpDatafile, newDatafile, mergeFile, new DataFileValue(stats.getFileSize(), stats.getEntriesWritten()),
            commitSession, flushId);
      } finally {
        span.stop();
      }
      return new DataFileValue(stats.getFileSize(), stats.getEntriesWritten());
    } catch (Exception E) {
      failed = true;
      throw new RuntimeException(E);
    } catch (Error E) {
      // Weird errors like "OutOfMemoryError" when trying to create the thread for the compaction
      failed = true;
      throw new RuntimeException(E);
    } finally {
      try {
        tabletMemory.finalizeMinC();
      } catch (Throwable t) {
        log.error("Failed to free tablet memory", t);
      }

      if (!failed) {
        lastMinorCompactionFinishTime = System.currentTimeMillis();
      }
      if (tabletServer.mincMetrics.isEnabled())
        tabletServer.mincMetrics.add(TabletServerMinCMetrics.minc, (lastMinorCompactionFinishTime - start));
      if (hasQueueTime) {
        timer.updateTime(Operation.MINOR, queued, start, count, failed);
        if (tabletServer.mincMetrics.isEnabled())
          tabletServer.mincMetrics.add(TabletServerMinCMetrics.queue, (start - queued));
      } else
        timer.updateTime(Operation.MINOR, start, count, failed);
    }
  }
Trace.on("minorCompaction"); try { FileRef newMapfileLocation = getNextMapFilename(mergeFile == null ? "F" : "M"); FileRef tmpFileRef = new FileRef(newMapfileLocation.path() + "_tmp"); Span span = Trace.start("waitForCommits"); synchronized (Tablet.this) { commitSession.waitForCommitsToFinish(); } span.stop(); span = Trace.start("start"); while (true) { try { // the purpose of the minor compaction start event is to keep track of the filename... in the case // where the metadata table write for the minor compaction finishes and the process dies before // writing the minor compaction finish event, then the start event+filename in metadata table will // prevent recovery of duplicate data... the minor compaction start event could be written at any time // before the metadata write for the minor compaction tabletServer.minorCompactionStarted(commitSession, commitSession.getWALogSeq() + 1, newMapfileLocation.path().toString()); break; } catch (IOException e) { log.warn("Failed to write to write ahead log " + e.getMessage(), e); } } span.stop(); span = Trace.start("compact"); this.stats = minorCompact(conf, fs, tabletMemory.getMinCMemTable(), tmpFileRef, newMapfileLocation, mergeFile, true, queued, commitSession, flushId, mincReason); span.stop(); if (needsSplit()) { tabletServer.executeSplit(Tablet.this); } else { initiateMajorCompaction(MajorCompactionReason.NORMAL); } } catch (Throwable t) { log.error("Unknown error during minor compaction for extent: " + getExtent(), t); throw new RuntimeException(t); } finally { minorCompactionInProgress = false; minorCompaction.data("extent", extent.toString()); minorCompaction.data("numEntries", Long.toString(this.stats.getNumEntries())); minorCompaction.data("size", Long.toString(this.stats.getSize())); minorCompaction.stop(); } } } private synchronized MinorCompactionTask prepareForMinC(long flushId, MinorCompactionReason mincReason) { CommitSession oldCommitSession = tabletMemory.prepareForMinC(); otherLogs = currentLogs; 
currentLogs = new HashSet<DfsLogger>(); FileRef mergeFile = datafileManager.reserveMergingMinorCompactionFile(); return new MinorCompactionTask(mergeFile, oldCommitSession, flushId, mincReason); } void flush(long tableFlushID) { boolean updateMetadata = false; boolean initiateMinor = false; try { synchronized (this) { // only want one thing at a time to update flush ID to ensure that metadata table and tablet in memory state are consistent if (updatingFlushID) return; if (lastFlushID >= tableFlushID) return; if (closing || closed || tabletMemory.memoryReservedForMinC()) return; if (tabletMemory.getMemTable().getNumEntries() == 0) { lastFlushID = tableFlushID; updatingFlushID = true; updateMetadata = true; } else initiateMinor = true; } if (updateMetadata) { Credentials creds = SystemCredentials.get(); // if multiple threads were allowed to update this outside of a sync block, then it would be // a race condition MetadataTableUtil.updateTabletFlushID(extent, tableFlushID, creds, tabletServer.getLock()); } else if (initiateMinor) initiateMinorCompaction(tableFlushID, MinorCompactionReason.USER); } finally { if (updateMetadata) { synchronized (this) { updatingFlushID = false; this.notifyAll(); } } } } boolean initiateMinorCompaction(MinorCompactionReason mincReason) { if (isClosed()) { // don't bother trying to get flush id if closed... could be closed after this check but that is ok... just trying to cut down on uneeded log messages.... 
return false; } // get the flush id before the new memmap is made available for write long flushId; try { flushId = getFlushID(); } catch (NoNodeException e) { log.info("Asked to initiate MinC when there was no flush id " + getExtent() + " " + e.getMessage()); return false; } return initiateMinorCompaction(flushId, mincReason); } boolean minorCompactNow(MinorCompactionReason mincReason) { long flushId; try { flushId = getFlushID(); } catch (NoNodeException e) { log.info("Asked to initiate MinC when there was no flush id " + getExtent() + " " + e.getMessage()); return false; } MinorCompactionTask mct = createMinorCompactionTask(flushId, mincReason); if (mct == null) return false; mct.run(); return true; } boolean initiateMinorCompaction(long flushId, MinorCompactionReason mincReason) { MinorCompactionTask mct = createMinorCompactionTask(flushId, mincReason); if (mct == null) return false; tabletResources.executeMinorCompaction(mct); return true; } private MinorCompactionTask createMinorCompactionTask(long flushId, MinorCompactionReason mincReason) { MinorCompactionTask mct; long t1, t2; StringBuilder logMessage = null; try { synchronized (this) { t1 = System.currentTimeMillis(); if (closing || closed || majorCompactionWaitingToStart || tabletMemory.memoryReservedForMinC() || tabletMemory.getMemTable().getNumEntries() == 0 || updatingFlushID) { logMessage = new StringBuilder(); logMessage.append(extent.toString()); logMessage.append(" closing " + closing); logMessage.append(" closed " + closed); logMessage.append(" majorCompactionWaitingToStart " + majorCompactionWaitingToStart); if (tabletMemory != null) logMessage.append(" tabletMemory.memoryReservedForMinC() " + tabletMemory.memoryReservedForMinC()); if (tabletMemory != null && tabletMemory.getMemTable() != null) logMessage.append(" tabletMemory.getMemTable().getNumEntries() " + tabletMemory.getMemTable().getNumEntries()); logMessage.append(" updatingFlushID " + updatingFlushID); return null; } // We're still 
recovering log entries if (datafileManager == null) { logMessage = new StringBuilder(); logMessage.append(extent.toString()); logMessage.append(" datafileManager " + datafileManager); return null; } mct = prepareForMinC(flushId, mincReason); t2 = System.currentTimeMillis(); } } finally { // log outside of sync block if (logMessage != null && log.isDebugEnabled()) log.debug(logMessage); } log.debug(String.format("MinC initiate lock %.2f secs", (t2 - t1) / 1000.0)); return mct; } long getFlushID() throws NoNodeException { try { String zTablePath = Constants.ZROOT + "/" + HdfsZooInstance.getInstance().getInstanceID() + Constants.ZTABLES + "/" + extent.getTableId() + Constants.ZTABLE_FLUSH_ID; return Long.parseLong(new String(ZooReaderWriter.getRetryingInstance().getData(zTablePath, null))); } catch (InterruptedException e) { throw new RuntimeException(e); } catch (NumberFormatException nfe) { throw new RuntimeException(nfe); } catch (KeeperException ke) { if (ke instanceof NoNodeException) { throw (NoNodeException) ke; } else { throw new RuntimeException(ke); } } } long getCompactionCancelID() { String zTablePath = Constants.ZROOT + "/" + HdfsZooInstance.getInstance().getInstanceID() + Constants.ZTABLES + "/" + extent.getTableId() + Constants.ZTABLE_COMPACT_CANCEL_ID; try { return Long.parseLong(new String(ZooReaderWriter.getRetryingInstance().getData(zTablePath, null))); } catch (KeeperException e) { throw new RuntimeException(e); } catch (InterruptedException e) { throw new RuntimeException(e); } } Pair<Long,List<IteratorSetting>> getCompactionID() throws NoNodeException { try { String zTablePath = Constants.ZROOT + "/" + HdfsZooInstance.getInstance().getInstanceID() + Constants.ZTABLES + "/" + extent.getTableId() + Constants.ZTABLE_COMPACT_ID; String[] tokens = new String(ZooReaderWriter.getRetryingInstance().getData(zTablePath, null)).split(","); long compactID = Long.parseLong(tokens[0]); CompactionIterators iters = new CompactionIterators(); if (tokens.length > 
1) { Hex hex = new Hex(); ByteArrayInputStream bais = new ByteArrayInputStream(hex.decode(tokens[1].split("=")[1].getBytes())); DataInputStream dis = new DataInputStream(bais); try { iters.readFields(dis); } catch (IOException e) { throw new RuntimeException(e); } KeyExtent ke = new KeyExtent(extent.getTableId(), iters.getEndRow(), iters.getStartRow()); if (!ke.overlaps(extent)) { // only use iterators if compaction range overlaps iters = new CompactionIterators(); } } return new Pair<Long,List<IteratorSetting>>(compactID, iters.getIterators()); } catch (InterruptedException e) { throw new RuntimeException(e); } catch (NumberFormatException nfe) { throw new RuntimeException(nfe); } catch (KeeperException ke) { if (ke instanceof NoNodeException) { throw (NoNodeException) ke; } else { throw new RuntimeException(ke); } } catch (DecoderException e) { throw new RuntimeException(e); } } public synchronized void waitForMinC() { tabletMemory.waitForMinC(); } static class TConstraintViolationException extends Exception { private static final long serialVersionUID = 1L; private Violations violations; private List<Mutation> violators; private List<Mutation> nonViolators; private CommitSession commitSession; TConstraintViolationException(Violations violations, List<Mutation> violators, List<Mutation> nonViolators, CommitSession commitSession) { this.violations = violations; this.violators = violators; this.nonViolators = nonViolators; this.commitSession = commitSession; } Violations getViolations() { return violations; } List<Mutation> getViolators() { return violators; } List<Mutation> getNonViolators() { return nonViolators; } CommitSession getCommitSession() { return commitSession; } } private synchronized CommitSession finishPreparingMutations(long time) { if (writesInProgress < 0) { throw new IllegalStateException("waitingForLogs < 0 " + writesInProgress); } if (closed || tabletMemory == null) { // log.debug("tablet closed, can't commit"); return null; } 
writesInProgress++; CommitSession commitSession = tabletMemory.getCommitSession(); commitSession.incrementCommitsInProgress(); commitSession.updateMaxCommittedTime(time); return commitSession; } public void checkConstraints() { ConstraintChecker cc = constraintChecker.get(); if (cc.classLoaderChanged()) { ConstraintChecker ncc = new ConstraintChecker(getTableConfiguration()); constraintChecker.compareAndSet(cc, ncc); } } public CommitSession prepareMutationsForCommit(TservConstraintEnv cenv, List<Mutation> mutations) throws TConstraintViolationException { ConstraintChecker cc = constraintChecker.get(); List<Mutation> violators = null; Violations violations = new Violations(); cenv.setExtent(extent); for (Mutation mutation : mutations) { Violations more = cc.check(cenv, mutation); if (more != null) { violations.add(more); if (violators == null) violators = new ArrayList<Mutation>(); violators.add(mutation); } } long time = tabletTime.setUpdateTimes(mutations); if (!violations.isEmpty()) { HashSet<Mutation> violatorsSet = new HashSet<Mutation>(violators); ArrayList<Mutation> nonViolators = new ArrayList<Mutation>(); for (Mutation mutation : mutations) { if (!violatorsSet.contains(mutation)) { nonViolators.add(mutation); } } CommitSession commitSession = null; if (nonViolators.size() > 0) { // if everything is a violation, then it is expected that // code calling this will not log or commit commitSession = finishPreparingMutations(time); if (commitSession == null) return null; } throw new TConstraintViolationException(violations, violators, nonViolators, commitSession); } return finishPreparingMutations(time); } public synchronized void abortCommit(CommitSession commitSession, List<Mutation> value) { if (writesInProgress <= 0) { throw new IllegalStateException("waitingForLogs <= 0 " + writesInProgress); } if (closeComplete || tabletMemory == null) { throw new IllegalStateException("aborting commit when tablet is closed"); } commitSession.decrementCommitsInProgress(); 
writesInProgress--; if (writesInProgress == 0) this.notifyAll(); } public void commit(CommitSession commitSession, List<Mutation> mutations) { int totalCount = 0; long totalBytes = 0; // write the mutation to the in memory table for (Mutation mutation : mutations) { totalCount += mutation.size(); totalBytes += mutation.numBytes(); } tabletMemory.mutate(commitSession, mutations); synchronized (this) { if (writesInProgress < 1) { throw new IllegalStateException("commiting mutations after logging, but not waiting for any log messages"); } if (closed && closeComplete) { throw new IllegalStateException("tablet closed with outstanding messages to the logger"); } tabletMemory.updateMemoryUsageStats(); // decrement here in case an exception is thrown below writesInProgress--; if (writesInProgress == 0) this.notifyAll(); commitSession.decrementCommitsInProgress(); numEntries += totalCount; numEntriesInMemory += totalCount; ingestCount += totalCount; ingestBytes += totalBytes; } } /** * Closes the mapfiles associated with a Tablet. If saveState is true, a minor compaction is performed. 
*/ public void close(boolean saveState) throws IOException { initiateClose(saveState, false, false); completeClose(saveState, true); } void initiateClose(boolean saveState, boolean queueMinC, boolean disableWrites) { if (!saveState && queueMinC) { throw new IllegalArgumentException("Not saving state on close and requesting minor compactions queue does not make sense"); } log.debug("initiateClose(saveState=" + saveState + " queueMinC=" + queueMinC + " disableWrites=" + disableWrites + ") " + getExtent()); MinorCompactionTask mct = null; synchronized (this) { if (closed || closing || closeComplete) { String msg = "Tablet " + getExtent() + " already"; if (closed) msg += " closed"; if (closing) msg += " closing"; if (closeComplete) msg += " closeComplete"; throw new IllegalStateException(msg); } // enter the closing state, no splits, minor, or major compactions can start // should cause running major compactions to stop closing = true; this.notifyAll(); // determines if inserts and queries can still continue while minor compacting closed = disableWrites; // wait for major compactions to finish, setting closing to // true should cause any running major compactions to abort while (majorCompactionInProgress) { try { this.wait(50); } catch (InterruptedException e) { log.error(e.toString()); } } while (updatingFlushID) { try { this.wait(50); } catch (InterruptedException e) { log.error(e.toString()); } } if (!saveState || tabletMemory.getMemTable().getNumEntries() == 0) { return; } tabletMemory.waitForMinC(); try { mct = prepareForMinC(getFlushID(), MinorCompactionReason.CLOSE); } catch (NoNodeException e) { throw new RuntimeException(e); } if (queueMinC) { tabletResources.executeMinorCompaction(mct); return; } } // do minor compaction outside of synch block so that tablet can be read and written to while // compaction runs mct.run(); } private boolean closeCompleting = false; synchronized void completeClose(boolean saveState, boolean completeClose) throws IOException { if 
(!closing || closeComplete || closeCompleting) { throw new IllegalStateException("closing = " + closing + " closed = " + closed + " closeComplete = " + closeComplete + " closeCompleting = " + closeCompleting); } log.debug("completeClose(saveState=" + saveState + " completeClose=" + completeClose + ") " + getExtent()); // ensure this method is only called once, also guards against multiple // threads entering the method at the same time closeCompleting = true; closed = true; // modify dataSourceDeletions so scans will try to switch data sources and fail because the tablet is closed dataSourceDeletions.incrementAndGet(); for (ScanDataSource activeScan : activeScans) { activeScan.interrupt(); } // wait for reads and writes to complete while (writesInProgress > 0 || activeScans.size() > 0) { try { this.wait(50); } catch (InterruptedException e) { log.error(e.toString()); } } tabletMemory.waitForMinC(); if (saveState && tabletMemory.getMemTable().getNumEntries() > 0) { try { prepareForMinC(getFlushID(), MinorCompactionReason.CLOSE).run(); } catch (NoNodeException e) { throw new RuntimeException(e); } } if (saveState) { // at this point all tablet data is flushed, so do a consistency check RuntimeException err = null; for (int i = 0; i < 5; i++) { try { closeConsistencyCheck(); err = null; } catch (RuntimeException t) { err = t; log.error("Consistency check fails, retrying " + t); UtilWaitThread.sleep(500); } } if (err != null) { ProblemReports.getInstance().report(new ProblemReport(extent.getTableId().toString(), ProblemType.TABLET_LOAD, this.extent.toString(), err)); log.error("Tablet closed consistency check has failed for " + this.extent + " giving up and closing"); } } try { tabletMemory.getMemTable().delete(0); } catch (Throwable t) { log.error("Failed to delete mem table : " + t.getMessage(), t); } tabletMemory = null; // close map files tabletResources.close(); log.log(TLevel.TABLET_HIST, extent + " closed"); acuTableConf.removeObserver(configObserver); 
closeComplete = completeClose; } private void closeConsistencyCheck() { if (tabletMemory.getMemTable().getNumEntries() != 0) { String msg = "Closed tablet " + extent + " has " + tabletMemory.getMemTable().getNumEntries() + " entries in memory"; log.error(msg); throw new RuntimeException(msg); } if (tabletMemory.memoryReservedForMinC()) { String msg = "Closed tablet " + extent + " has minor compacting memory"; log.error(msg); throw new RuntimeException(msg); } try { Pair<List<LogEntry>,SortedMap<FileRef,DataFileValue>> fileLog = MetadataTableUtil.getFileAndLogEntries(SystemCredentials.get(), extent); if (fileLog.getFirst().size() != 0) { String msg = "Closed tablet " + extent + " has walog entries in " + MetadataTable.NAME + " " + fileLog.getFirst(); log.error(msg); throw new RuntimeException(msg); } if (extent.isRootTablet()) { if (!fileLog.getSecond().keySet().equals(datafileManager.getDatafileSizes().keySet())) { String msg = "Data file in " + RootTable.NAME + " differ from in memory data " + extent + " " + fileLog.getSecond().keySet() + " " + datafileManager.getDatafileSizes().keySet(); log.error(msg); throw new RuntimeException(msg); } } else { if (!fileLog.getSecond().equals(datafileManager.getDatafileSizes())) { String msg = "Data file in " + MetadataTable.NAME + " differ from in memory data " + extent + " " + fileLog.getSecond() + " " + datafileManager.getDatafileSizes(); log.error(msg); throw new RuntimeException(msg); } } } catch (Exception e) { String msg = "Failed to do close consistency check for tablet " + extent; log.error(msg, e); throw new RuntimeException(msg, e); } if (otherLogs.size() != 0 || currentLogs.size() != 0) { String msg = "Closed tablet " + extent + " has walog entries in memory currentLogs = " + currentLogs + " otherLogs = " + otherLogs; log.error(msg); throw new RuntimeException(msg); } // TODO check lastFlushID and lostCompactID - ACCUMULO-1290 } /** * Returns a Path object representing the tablet's location on the DFS. 
* * @return location */ public Path getLocation() { return location; } private class CompactionRunner implements Runnable, Comparable<CompactionRunner> { long queued; long start; boolean failed = false; private MajorCompactionReason reason; public CompactionRunner(MajorCompactionReason reason) { queued = System.currentTimeMillis(); this.reason = reason; } @Override public void run() { CompactionStats majCStats = null; if (tabletServer.isMajorCompactionDisabled()) { // this will make compaction task that were queued when shutdown was // initiated exit majorCompactionQueued.remove(reason); return; } try { timer.incrementStatusMajor(); start = System.currentTimeMillis(); majCStats = majorCompact(reason); // if there is more work to be done, queue another major compaction synchronized (Tablet.this) { if (reason == MajorCompactionReason.NORMAL && needsMajorCompaction(reason)) initiateMajorCompaction(reason); } } catch (RuntimeException E) { failed = true; } finally { long count = 0; if (majCStats != null) { count = majCStats.getEntriesRead(); } timer.updateTime(Operation.MAJOR, queued, start, count, failed); } } // We used to synchronize on the Tablet before fetching this information, // but this method is called by the compaction queue thread to re-order the compactions. // The compaction queue holds a lock during this sort. // A tablet lock can be held while putting itself on the queue, so we can't lock the tablet // while pulling information used to sort the tablets in the queue, or we may get deadlocked. // See ACCUMULO-1110. 
private int getNumFiles() { return datafileManager.datafileSizes.size(); } @Override public int compareTo(CompactionRunner o) { int cmp = reason.compareTo(o.reason); if (cmp != 0) return cmp; if (reason == MajorCompactionReason.USER || reason == MajorCompactionReason.CHOP) { // for these types of compactions want to do the oldest first cmp = (int) (queued - o.queued); if (cmp != 0) return cmp; } return o.getNumFiles() - this.getNumFiles(); } } synchronized boolean initiateMajorCompaction(MajorCompactionReason reason) { if (closing || closed || !needsMajorCompaction(reason) || majorCompactionInProgress || majorCompactionQueued.contains(reason)) { return false; } majorCompactionQueued.add(reason); tabletResources.executeMajorCompaction(getExtent(), new CompactionRunner(reason)); return false; } /** * Returns true if a major compaction should be performed on the tablet. * */ public boolean needsMajorCompaction(MajorCompactionReason reason) { if (majorCompactionInProgress) return false; if (reason == MajorCompactionReason.CHOP || reason == MajorCompactionReason.USER) return true; return tabletResources.needsMajorCompaction(datafileManager.getDatafileSizes(), reason); } /** * Returns an int representing the total block size of the mapfiles served by this tablet. 
* * @return size */ // this is the size of just the mapfiles public long estimateTabletSize() { long size = 0L; for (DataFileValue sz : datafileManager.getDatafileSizes().values()) size += sz.getSize(); return size; } private boolean sawBigRow = false; private long timeOfLastMinCWhenBigFreakinRowWasSeen = 0; private long timeOfLastImportWhenBigFreakinRowWasSeen = 0; private long splitCreationTime; private static class SplitRowSpec { double splitRatio; Text row; SplitRowSpec(double splitRatio, Text row) { this.splitRatio = splitRatio; this.row = row; } } private SplitRowSpec findSplitRow(Collection<FileRef> files) { // never split the root tablet // check if we already decided that we can never split // check to see if we're big enough to split long splitThreshold = acuTableConf.getMemoryInBytes(Property.TABLE_SPLIT_THRESHOLD); if (extent.isRootTablet() || estimateTabletSize() <= splitThreshold) { return null; } // have seen a big row before, do not bother checking unless a minor compaction or map file import has occurred. if (sawBigRow) { if (timeOfLastMinCWhenBigFreakinRowWasSeen != lastMinorCompactionFinishTime || timeOfLastImportWhenBigFreakinRowWasSeen != lastMapFileImportTime) { // a minor compaction or map file import has occurred... 
check again sawBigRow = false; } else { // nothing changed, do not split return null; } } SortedMap<Double,Key> keys = null; try { // we should make .25 below configurable keys = FileUtil.findMidPoint(fs, tabletServer.getSystemConfiguration(), extent.getPrevEndRow(), extent.getEndRow(), files, .25); } catch (IOException e) { log.error("Failed to find midpoint " + e.getMessage()); return null; } // check to see if one row takes up most of the tablet, in which case we can not split try { Text lastRow; if (extent.getEndRow() == null) { Key lastKey = (Key) FileUtil.findLastKey(fs, tabletServer.getSystemConfiguration(), files); lastRow = lastKey.getRow(); } else { lastRow = extent.getEndRow(); } // check to see that the midPoint is not equal to the end key if (keys.get(.5).compareRow(lastRow) == 0) { if (keys.firstKey() < .5) { Key candidate = keys.get(keys.firstKey()); if (candidate.compareRow(lastRow) != 0) { // we should use this ratio in split size estimations if (log.isTraceEnabled()) log.trace(String.format("Splitting at %6.2f instead of .5, row at .5 is same as end row%n", keys.firstKey())); return new SplitRowSpec(keys.firstKey(), candidate.getRow()); } } log.warn("Cannot split tablet " + extent + " it contains a big row : " + lastRow); sawBigRow = true; timeOfLastMinCWhenBigFreakinRowWasSeen = lastMinorCompactionFinishTime; timeOfLastImportWhenBigFreakinRowWasSeen = lastMapFileImportTime; return null; } Key mid = keys.get(.5); Text text = (mid == null) ? 
null : mid.getRow(); SortedMap<Double,Key> firstHalf = keys.headMap(.5); if (firstHalf.size() > 0) { Text beforeMid = firstHalf.get(firstHalf.lastKey()).getRow(); Text shorter = new Text(); int trunc = longestCommonLength(text, beforeMid); shorter.set(text.getBytes(), 0, Math.min(text.getLength(), trunc + 1)); text = shorter; } return new SplitRowSpec(.5, text); } catch (IOException e) { // don't split now, but check again later log.error("Failed to find lastkey " + e.getMessage()); return null; } } private static int longestCommonLength(Text text, Text beforeMid) { int common = 0; while (common < text.getLength() && common < beforeMid.getLength() && text.getBytes()[common] == beforeMid.getBytes()[common]) { common++; } return common; } private Map<FileRef,Pair<Key,Key>> getFirstAndLastKeys(SortedMap<FileRef,DataFileValue> allFiles) throws IOException { Map<FileRef,Pair<Key,Key>> result = new HashMap<FileRef,Pair<Key,Key>>(); FileOperations fileFactory = FileOperations.getInstance(); for (Entry<FileRef,DataFileValue> entry : allFiles.entrySet()) { FileRef file = entry.getKey(); FileSystem ns = fs.getFileSystemByPath(file.path()); FileSKVIterator openReader = fileFactory.openReader(file.path().toString(), true, ns, ns.getConf(), this.getTableConfiguration()); try { Key first = openReader.getFirstKey(); Key last = openReader.getLastKey(); result.put(file, new Pair<Key,Key>(first, last)); } finally { openReader.close(); } } return result; } List<FileRef> findChopFiles(KeyExtent extent, Map<FileRef,Pair<Key,Key>> firstAndLastKeys, Collection<FileRef> allFiles) throws IOException { List<FileRef> result = new ArrayList<FileRef>(); if (firstAndLastKeys == null) { result.addAll(allFiles); return result; } for (FileRef file : allFiles) { Pair<Key,Key> pair = firstAndLastKeys.get(file); if (pair == null) { // file was created or imported after we obtained the first and last keys... there // are a few options here... 
throw an exception which will cause the compaction to // retry and also cause ugly error message that the admin has to ignore... could // go get the first and last key, but this code is called while the tablet lock // is held... or just compact the file.... result.add(file); } else { Key first = pair.getFirst(); Key last = pair.getSecond(); // If first and last are null, it's an empty file. Add it to the compact set so it goes away. if ((first == null && last == null) || !extent.contains(first.getRow()) || !extent.contains(last.getRow())) { result.add(file); } } } return result; } /** * Returns true if this tablet needs to be split * */ public synchronized boolean needsSplit() { boolean ret; if (closing || closed) ret = false; else ret = findSplitRow(datafileManager.getFiles()) != null; return ret; } // BEGIN PRIVATE METHODS RELATED TO MAJOR COMPACTION private boolean isCompactionEnabled() { return !closing && !tabletServer.isMajorCompactionDisabled(); } private CompactionStats _majorCompact(MajorCompactionReason reason) throws IOException, CompactionCanceledException { long t1, t2, t3; // acquire file info outside of tablet lock CompactionStrategy strategy = Property.createInstanceFromPropertyName(acuTableConf, Property.TABLE_COMPACTION_STRATEGY, CompactionStrategy.class, new DefaultCompactionStrategy()); strategy.init(Property.getCompactionStrategyOptions(acuTableConf)); Map<FileRef,Pair<Key,Key>> firstAndLastKeys = null; if (reason == MajorCompactionReason.CHOP) { firstAndLastKeys = getFirstAndLastKeys(datafileManager.getDatafileSizes()); } else if (reason != MajorCompactionReason.USER) { MajorCompactionRequest request = new MajorCompactionRequest(extent, reason, fs, acuTableConf); request.setFiles(datafileManager.getDatafileSizes()); strategy.gatherInformation(request); } Map<FileRef, DataFileValue> filesToCompact; int maxFilesToCompact = acuTableConf.getCount(Property.TSERV_MAJC_THREAD_MAXOPEN); CompactionStats majCStats = new CompactionStats(); CompactionPlan 
plan = null;
boolean propogateDeletes = false;

synchronized (this) {
  // plan all that work that needs to be done in the sync block... then do the actual work
  // outside the sync block

  t1 = System.currentTimeMillis();

  majorCompactionWaitingToStart = true;

  tabletMemory.waitForMinC();

  t2 = System.currentTimeMillis();

  majorCompactionWaitingToStart = false;
  notifyAll();

  if (extent.isRootTablet()) {
    // very important that we call this before doing major compaction,
    // otherwise deleted compacted files could possible be brought back
    // at some point if the file they were compacted to was legitimately
    // removed by a major compaction
    cleanUpFiles(fs, fs.listStatus(this.location), false);
  }

  SortedMap<FileRef,DataFileValue> allFiles = datafileManager.getDatafileSizes();
  List<FileRef> inputFiles = new ArrayList<FileRef>();
  if (reason == MajorCompactionReason.CHOP) {
    // enforce rules: files with keys outside our range need to be compacted
    inputFiles.addAll(findChopFiles(extent, firstAndLastKeys, allFiles.keySet()));
  } else if (reason == MajorCompactionReason.USER) {
    inputFiles.addAll(allFiles.keySet());
  } else {
    MajorCompactionRequest request = new MajorCompactionRequest(extent, reason, fs, acuTableConf);
    request.setFiles(allFiles);
    plan = strategy.getCompactionPlan(request);
    if (plan != null)
      inputFiles.addAll(plan.inputFiles);
  }

  if (inputFiles.isEmpty()) {
    return majCStats;
  }

  // If no original files will exist at the end of the compaction, we do not have to propogate deletes
  Set<FileRef> droppedFiles = new HashSet<FileRef>();
  droppedFiles.addAll(inputFiles);
  if (plan != null)
    droppedFiles.addAll(plan.deleteFiles);
  // Deletes must be kept in the compaction output whenever some file NOT covered by this
  // compaction remains, since a remaining file may still hold entries those deletes suppress.
  // Only when every existing file is dropped is it safe to drop the delete markers.
  // (The collapsed text here contained leftover merge-diff '-'/'+' lines duplicating this
  // assignment; only the corrected, negated form is kept.)
  propogateDeletes = !(droppedFiles.equals(allFiles.keySet()));
  log.debug("Major compaction plan: " + plan + " propogate deletes : " + propogateDeletes);
  filesToCompact = new HashMap<FileRef,DataFileValue>(allFiles);
  filesToCompact.keySet().retainAll(inputFiles);

  t3 = System.currentTimeMillis();
datafileManager.reserveMajorCompactingFiles(filesToCompact.keySet()); } try { log.debug(String.format("MajC initiate lock %.2f secs, wait %.2f secs", (t3 - t2) / 1000.0, (t2 - t1) / 1000.0)); Pair<Long,List<IteratorSetting>> compactionId = null; if (!propogateDeletes) { // compacting everything, so update the compaction id in !METADATA try { compactionId = getCompactionID(); } catch (NoNodeException e) { throw new RuntimeException(e); } } List<IteratorSetting> compactionIterators = new ArrayList<IteratorSetting>(); if (compactionId != null) { if (reason == MajorCompactionReason.USER) { if (getCompactionCancelID() >= compactionId.getFirst()) { // compaction was canceled return majCStats; } synchronized (this) { if (lastCompactID >= compactionId.getFirst()) // already compacted return majCStats; } } compactionIterators = compactionId.getSecond(); } // need to handle case where only one file is being major compacted while (filesToCompact.size() > 0) { int numToCompact = maxFilesToCompact; if (filesToCompact.size() > maxFilesToCompact && filesToCompact.size() < 2 * maxFilesToCompact) { // on the second to last compaction pass, compact the minimum amount of files possible numToCompact = filesToCompact.size() - maxFilesToCompact + 1; } Set<FileRef> smallestFiles = removeSmallest(filesToCompact, numToCompact); FileRef fileName = getNextMapFilename((filesToCompact.size() == 0 && !propogateDeletes) ? 
"A" : "C"); FileRef compactTmpName = new FileRef(fileName.path().toString() + "_tmp"); AccumuloConfiguration tableConf = createTableConfiguration(acuTableConf, plan); Span span = Trace.start("compactFiles"); try { CompactionEnv cenv = new CompactionEnv() { @Override public boolean isCompactionEnabled() { return Tablet.this.isCompactionEnabled(); } @Override public IteratorScope getIteratorScope() { return IteratorScope.majc; } }; HashMap<FileRef,DataFileValue> copy = new HashMap<FileRef,DataFileValue>(datafileManager.getDatafileSizes()); if (!copy.keySet().containsAll(smallestFiles)) throw new IllegalStateException("Cannot find data file values for " + smallestFiles); copy.keySet().retainAll(smallestFiles); log.debug("Starting MajC " + extent + " (" + reason + ") " + copy.keySet() + " --> " + compactTmpName + " " + compactionIterators); // always propagate deletes, unless last batch boolean lastBatch = filesToCompact.isEmpty(); Compactor compactor = new Compactor(conf, fs, copy, null, compactTmpName, lastBatch ? propogateDeletes : true, tableConf, extent, cenv, compactionIterators, reason); CompactionStats mcs = compactor.call(); span.data("files", "" + smallestFiles.size()); span.data("read", "" + mcs.getEntriesRead()); span.data("written", "" + mcs.getEntriesWritten()); majCStats.add(mcs); if (lastBatch && plan != null && plan.deleteFiles != null) { smallestFiles.addAll(plan.deleteFiles); } datafileManager.bringMajorCompactionOnline(smallestFiles, compactTmpName, fileName, filesToCompact.size() == 0 && compactionId != null ? compactionId.getFirst() : null, new DataFileValue(mcs.getFileSize(), mcs.getEntriesWritten())); // when major compaction produces a file w/ zero entries, it will be deleted... 
do not want // to add the deleted file if (filesToCompact.size() > 0 && mcs.getEntriesWritten() > 0) { filesToCompact.put(fileName, new DataFileValue(mcs.getFileSize(), mcs.getEntriesWritten())); } } finally { span.stop(); } } return majCStats; } finally { synchronized (Tablet.this) { datafileManager.clearMajorCompactingFile(); } } } private AccumuloConfiguration createTableConfiguration(TableConfiguration base, CompactionPlan plan) { if (plan == null || plan.writeParameters == null) return base; WriteParameters p = plan.writeParameters; ConfigurationCopy result = new ConfigurationCopy(base); if (p.getHdfsBlockSize() > 0) result.set(Property.TABLE_FILE_BLOCK_SIZE, "" + p.getHdfsBlockSize()); if (p.getBlockSize() > 0) result.set(Property.TABLE_FILE_COMPRESSED_BLOCK_SIZE, "" + p.getBlockSize()); if (p.getIndexBlockSize() > 0) result.set(Property.TABLE_FILE_COMPRESSED_BLOCK_SIZE_INDEX, "" + p.getBlockSize()); if (p.getCompressType() != null) result.set(Property.TABLE_FILE_COMPRESSION_TYPE, p.getCompressType()); if (p.getReplication() != 0) result.set(Property.TABLE_FILE_REPLICATION, "" + p.getReplication()); return result; } private Set<FileRef> removeSmallest(Map<FileRef,DataFileValue> filesToCompact, int maxFilesToCompact) { // ensure this method works properly when multiple files have the same size PriorityQueue<Pair<FileRef,Long>> fileHeap = new PriorityQueue<Pair<FileRef,Long>>(filesToCompact.size(), new Comparator<Pair<FileRef,Long>>() { @Override public int compare(Pair<FileRef,Long> o1, Pair<FileRef,Long> o2) { if (o1.getSecond() == o2.getSecond()) return o1.getFirst().compareTo(o2.getFirst()); if (o1.getSecond() < o2.getSecond()) return -1; return 1; } }); for (Iterator<Entry<FileRef,DataFileValue>> iterator = filesToCompact.entrySet().iterator(); iterator.hasNext();) { Entry<FileRef,DataFileValue> entry = iterator.next(); fileHeap.add(new Pair<FileRef,Long>(entry.getKey(), entry.getValue().getSize())); } Set<FileRef> smallestFiles = new HashSet<FileRef>(); 
while (smallestFiles.size() < maxFilesToCompact && fileHeap.size() > 0) { Pair<FileRef,Long> pair = fileHeap.remove(); filesToCompact.remove(pair.getFirst()); smallestFiles.add(pair.getFirst()); } return smallestFiles; } // END PRIVATE METHODS RELATED TO MAJOR COMPACTION /** * Performs a major compaction on the tablet. If needsSplit() returns true, the tablet is split and a reference to the new tablet is returned. */ private CompactionStats majorCompact(MajorCompactionReason reason) { CompactionStats majCStats = null; // Always trace majC Span span = Trace.on("majorCompaction"); try { synchronized (this) { // check that compaction is still needed - defer to splitting majorCompactionQueued.remove(reason); if (closing || closed || !needsMajorCompaction(reason) || majorCompactionInProgress || needsSplit()) { return null; } majorCompactionInProgress = true; } majCStats = _majorCompact(reason); if (reason == MajorCompactionReason.CHOP) { MetadataTableUtil.chopped(getExtent(), this.tabletServer.getLock()); tabletServer.enqueueMasterMessage(new TabletStatusMessage(TabletLoadState.CHOPPED, extent)); } } catch (CompactionCanceledException mcce) { log.debug("Major compaction canceled, extent = " + getExtent()); throw new RuntimeException(mcce); } catch (Throwable t) { log.error("MajC Failed, extent = " + getExtent()); log.error("MajC Failed, message = " + (t.getMessage() == null ? t.getClass().getName() : t.getMessage()), t); throw new RuntimeException(t); } finally { // ensure we always reset boolean, even // when an exception is thrown synchronized (this) { majorCompactionInProgress = false; this.notifyAll(); } Span curr = Trace.currentTrace(); curr.data("extent", "" + getExtent()); if (majCStats != null) { curr.data("read", "" + majCStats.getEntriesRead()); curr.data("written", "" + majCStats.getEntriesWritten()); } span.stop(); } return majCStats; } /** * Returns a KeyExtent object representing this tablet's key range. 
* * @return extent */ public KeyExtent getExtent() { return extent; } private synchronized void computeNumEntries() { Collection<DataFileValue> vals = datafileManager.getDatafileSizes().values(); long numEntries = 0; for (DataFileValue tableValue : vals) { numEntries += tableValue.getNumEntries(); } this.numEntriesInMemory = tabletMemory.getNumEntries(); numEntries += tabletMemory.getNumEntries(); this.numEntries = numEntries; } public long getNumEntries() { return numEntries; } public long getNumEntriesInMemory() { return numEntriesInMemory; } public synchronized boolean isClosing() { return closing; } public synchronized boolean isClosed() { return closed; } public synchronized boolean isCloseComplete() { return closeComplete; } public boolean majorCompactionRunning() { return this.majorCompactionInProgress; } public boolean minorCompactionQueued() { return minorCompactionWaitingToStart; } public boolean minorCompactionRunning() { return minorCompactionInProgress; } public boolean majorCompactionQueued() { return majorCompactionQueued.size() > 0; } /** * operations are disallowed while we split which is ok since splitting is fast * * a minor compaction should have taken place before calling this so there should be relatively little left to compact * * we just need to make sure major compactions aren't occurring if we have the major compactor thread decide who needs splitting we can avoid synchronization * issues with major compactions * */ static class SplitInfo { String dir; SortedMap<FileRef,DataFileValue> datafiles; String time; long initFlushID; long initCompactID; SplitInfo(String d, SortedMap<FileRef,DataFileValue> dfv, String time, long initFlushID, long initCompactID) { this.dir = d; this.datafiles = dfv; this.time = time; this.initFlushID = initFlushID; this.initCompactID = initCompactID; } } public TreeMap<KeyExtent,SplitInfo> split(byte[] sp) throws IOException { if (sp != null && extent.getEndRow() != null && extent.getEndRow().equals(new Text(sp))) { 
throw new IllegalArgumentException(); } if (extent.isRootTablet()) { String msg = "Cannot split root tablet"; log.warn(msg); throw new RuntimeException(msg); } try { initiateClose(true, false, false); } catch (IllegalStateException ise) { log.debug("File " + extent + " not splitting : " + ise.getMessage()); return null; } // obtain this info outside of synch block since it will involve opening // the map files... it is ok if the set of map files changes, because // this info is used for optimization... it is ok if map files are missing // from the set... can still query and insert into the tablet while this // map file operation is happening Map<FileRef,FileUtil.FileInfo> firstAndLastRows = FileUtil.tryToGetFirstAndLastRows(fs, tabletServer.getSystemConfiguration(), datafileManager.getFiles()); synchronized (this) { // java needs tuples ... TreeMap<KeyExtent,SplitInfo> newTablets = new TreeMap<KeyExtent,SplitInfo>(); long t1 = System.currentTimeMillis(); // choose a split point SplitRowSpec splitPoint; if (sp == null) splitPoint = findSplitRow(datafileManager.getFiles()); else { Text tsp = new Text(sp); splitPoint = new SplitRowSpec(FileUtil.estimatePercentageLTE(fs, tabletServer.getSystemConfiguration(), extent.getPrevEndRow(), extent.getEndRow(), datafileManager.getFiles(), tsp), tsp); } if (splitPoint == null || splitPoint.row == null) { log.info("had to abort split because splitRow was null"); closing = false; return null; } closed = true; completeClose(true, false); Text midRow = splitPoint.row; double splitRatio = splitPoint.splitRatio; KeyExtent low = new KeyExtent(extent.getTableId(), midRow, extent.getPrevEndRow()); KeyExtent high = new KeyExtent(extent.getTableId(), extent.getEndRow(), midRow); String lowDirectory = TabletOperations.createTabletDirectory(fs, extent.getTableId().toString(), midRow); // write new tablet information to MetadataTable SortedMap<FileRef,DataFileValue> lowDatafileSizes = new TreeMap<FileRef,DataFileValue>(); 
SortedMap<FileRef,DataFileValue> highDatafileSizes = new TreeMap<FileRef,DataFileValue>(); List<FileRef> highDatafilesToRemove = new ArrayList<FileRef>(); MetadataTableUtil.splitDatafiles(extent.getTableId(), midRow, splitRatio, firstAndLastRows, datafileManager.getDatafileSizes(), lowDatafileSizes, highDatafileSizes, highDatafilesToRemove); log.debug("Files for low split " + low + " " + lowDatafileSizes.keySet()); log.debug("Files for high split " + high + " " + highDatafileSizes.keySet()); String time = tabletTime.getMetadataValue(); // it is possible that some of the bulk loading flags will be deleted after being read below because the bulk load // finishes.... therefore split could propogate load flags for a finished bulk load... there is a special iterator // on the !METADATA table to clean up this type of garbage Map<FileRef,Long> bulkLoadedFiles = MetadataTableUtil.getBulkFilesLoaded(SystemCredentials.get(), extent); MetadataTableUtil.splitTablet(high, extent.getPrevEndRow(), splitRatio, SystemCredentials.get(), tabletServer.getLock()); MetadataTableUtil.addNewTablet(low, lowDirectory, tabletServer.getTabletSession(), lowDatafileSizes, bulkLoadedFiles, SystemCredentials.get(), time, lastFlushID, lastCompactID, tabletServer.getLock()); MetadataTableUtil.finishSplit(high, highDatafileSizes, highDatafilesToRemove, SystemCredentials.get(), tabletServer.getLock()); log.log(TLevel.TABLET_HIST, extent + " split " + low + " " + high); newTablets.put(high, new SplitInfo(tabletDirectory, highDatafileSizes, time, lastFlushID, lastCompactID)); newTablets.put(low, new SplitInfo(lowDirectory, lowDatafileSizes, time, lastFlushID, lastCompactID)); long t2 = System.currentTimeMillis(); log.debug(String.format("offline split time : %6.2f secs", (t2 - t1) / 1000.0)); closeComplete = true; return newTablets; } } public SortedMap<FileRef,DataFileValue> getDatafiles() { return datafileManager.getDatafileSizes(); } public double queryRate() { return queryRate.rate(); } public 
double queryByteRate() { return queryByteRate.rate(); } public double ingestRate() { return ingestRate.rate(); } public double ingestByteRate() { return ingestByteRate.rate(); } public double scanRate() { return scannedRate.rate(); } public long totalQueries() { return this.queryCount; } public long totalIngest() { return this.ingestCount; } // synchronized? public void updateRates(long now) { queryRate.update(now, queryCount); queryByteRate.update(now, queryBytes); ingestRate.update(now, ingestCount); ingestByteRate.update(now, ingestBytes); scannedRate.update(now, scannedCount.get()); } public long getSplitCreationTime() { return splitCreationTime; } public void importMapFiles(long tid, Map<FileRef,MapFileInfo> fileMap, boolean setTime) throws IOException { Map<FileRef,DataFileValue> entries = new HashMap<FileRef,DataFileValue>(fileMap.size()); for (FileRef path : fileMap.keySet()) { MapFileInfo mfi = fileMap.get(path); entries.put(path, new DataFileValue(mfi.estimatedSize, 0l)); } // Clients timeout and will think that this operation failed. // Don't do it if we spent too long waiting for the lock long now = System.currentTimeMillis(); synchronized (this) { if (closed) { throw new IOException("tablet " + extent + " is closed"); } // TODO check seems uneeded now - ACCUMULO-1291 long lockWait = System.currentTimeMillis() - now; if (lockWait > tabletServer.getSystemConfiguration().getTimeInMillis(Property.GENERAL_RPC_TIMEOUT)) { throw new IOException("Timeout waiting " + (lockWait / 1000.) 
+ " seconds to get tablet lock"); } if (writesInProgress < 0) throw new IllegalStateException("writesInProgress < 0 " + writesInProgress); writesInProgress++; } try { datafileManager.importMapFiles(tid, entries, setTime); lastMapFileImportTime = System.currentTimeMillis(); if (needsSplit()) { tabletServer.executeSplit(this); } else { initiateMajorCompaction(MajorCompactionReason.NORMAL); } } finally { synchronized (this) { if (writesInProgress < 1) throw new IllegalStateException("writesInProgress < 1 " + writesInProgress); writesInProgress--; if (writesInProgress == 0) this.notifyAll(); } } } private Set<DfsLogger> currentLogs = new HashSet<DfsLogger>(); public Set<String> getCurrentLogs() { Set<String> result = new HashSet<String>(); synchronized (currentLogs) { for (DfsLogger log : currentLogs) { result.add(log.toString()); } } return result; } private Set<String> beginClearingUnusedLogs() { Set<String> doomed = new HashSet<String>(); ArrayList<String> otherLogsCopy = new ArrayList<String>(); ArrayList<String> currentLogsCopy = new ArrayList<String>(); // do not hold tablet lock while acquiring the log lock logLock.lock(); synchronized (this) { if (removingLogs) throw new IllegalStateException("Attempted to clear logs when removal of logs in progress"); for (DfsLogger logger : otherLogs) { otherLogsCopy.add(logger.toString()); doomed.add(logger.toString()); } for (DfsLogger logger : currentLogs) { currentLogsCopy.add(logger.toString()); doomed.remove(logger.toString()); } otherLogs = Collections.emptySet(); if (doomed.size() > 0) removingLogs = true; } // do debug logging outside tablet lock for (String logger : otherLogsCopy) { log.debug("Logs for memory compacted: " + getExtent() + " " + logger.toString()); } for (String logger : currentLogsCopy) { log.debug("Logs for current memory: " + getExtent() + " " + logger); } return doomed; } private synchronized void finishClearingUnusedLogs() { removingLogs = false; logLock.unlock(); } private Set<DfsLogger> 
otherLogs = Collections.emptySet(); private boolean removingLogs = false; // this lock is basically used to synchronize writing of log info to !METADATA private final ReentrantLock logLock = new ReentrantLock(); public synchronized int getLogCount() { return currentLogs.size(); } private boolean beginUpdatingLogsUsed(InMemoryMap memTable, Collection<DfsLogger> more, boolean mincFinish) { boolean releaseLock = true; // do not hold tablet lock while acquiring the log lock logLock.lock(); try { synchronized (this) { if (closed && closeComplete) { throw new IllegalStateException("Can not update logs of closed tablet " + extent); } boolean addToOther; if (memTable == tabletMemory.otherMemTable) addToOther = true; else if (memTable == tabletMemory.memTable) addToOther = false; else throw new IllegalArgumentException("passed in memtable that is not in use"); if (mincFinish) { if (addToOther) throw new IllegalStateException("Adding to other logs for mincFinish"); if (otherLogs.size() != 0) throw new IllegalStateException("Expect other logs to be 0 when min finish, but its " + otherLogs); // when writing a minc finish event, there is no need to add the log to !METADATA // if nothing has been logged for the tablet since the minor compaction started if (currentLogs.size() == 0) return false; } int numAdded = 0; int numContained = 0; for (DfsLogger logger : more) { if (addToOther) { if (otherLogs.add(logger)) numAdded++; if (currentLogs.contains(logger)) numContained++; } else { if (currentLogs.add(logger)) numAdded++; if (otherLogs.contains(logger)) numContained++; } } if (numAdded > 0 && numAdded != more.size()) { // expect to add all or none throw new IllegalArgumentException("Added subset of logs " + extent + " " + more + " " + currentLogs); } if (numContained > 0 && numContained != more.size()) { // expect to contain all or none throw new IllegalArgumentException("Other logs contained subset of logs " + extent + " " + more + " " + otherLogs); } if (numAdded > 0 && 
numContained == 0) { releaseLock = false; return true; } return false; } } finally { if (releaseLock) logLock.unlock(); } } private void finishUpdatingLogsUsed() { logLock.unlock(); } synchronized public void chopFiles() { initiateMajorCompaction(MajorCompactionReason.CHOP); } public void compactAll(long compactionId) { boolean updateMetadata = false; synchronized (this) { if (lastCompactID >= compactionId) return; if (closing || closed || majorCompactionQueued.contains(MajorCompactionReason.USER) || majorCompactionInProgress) return; if (datafileManager.getDatafileSizes().size() == 0) { // no files, so jsut update the metadata table majorCompactionInProgress = true; updateMetadata = true; lastCompactID = compactionId; } else initiateMajorCompaction(MajorCompactionReason.USER); } if (updateMetadata) { try { // if multiple threads were allowed to update this outside of a sync block, then it would be // a race condition MetadataTableUtil.updateTabletCompactID(extent, compactionId, SystemCredentials.get(), tabletServer.getLock()); } finally { synchronized (this) { majorCompactionInProgress = false; this.notifyAll(); } } } } public TableConfiguration getTableConfiguration() { return tabletServer.getTableConfiguration(extent); } }
true
true
private CompactionStats _majorCompact(MajorCompactionReason reason) throws IOException, CompactionCanceledException { long t1, t2, t3; // acquire file info outside of tablet lock CompactionStrategy strategy = Property.createInstanceFromPropertyName(acuTableConf, Property.TABLE_COMPACTION_STRATEGY, CompactionStrategy.class, new DefaultCompactionStrategy()); strategy.init(Property.getCompactionStrategyOptions(acuTableConf)); Map<FileRef,Pair<Key,Key>> firstAndLastKeys = null; if (reason == MajorCompactionReason.CHOP) { firstAndLastKeys = getFirstAndLastKeys(datafileManager.getDatafileSizes()); } else if (reason != MajorCompactionReason.USER) { MajorCompactionRequest request = new MajorCompactionRequest(extent, reason, fs, acuTableConf); request.setFiles(datafileManager.getDatafileSizes()); strategy.gatherInformation(request); } Map<FileRef, DataFileValue> filesToCompact; int maxFilesToCompact = acuTableConf.getCount(Property.TSERV_MAJC_THREAD_MAXOPEN); CompactionStats majCStats = new CompactionStats(); CompactionPlan plan = null; boolean propogateDeletes = false; synchronized (this) { // plan all that work that needs to be done in the sync block... 
then do the actual work // outside the sync block t1 = System.currentTimeMillis(); majorCompactionWaitingToStart = true; tabletMemory.waitForMinC(); t2 = System.currentTimeMillis(); majorCompactionWaitingToStart = false; notifyAll(); if (extent.isRootTablet()) { // very important that we call this before doing major compaction, // otherwise deleted compacted files could possible be brought back // at some point if the file they were compacted to was legitimately // removed by a major compaction cleanUpFiles(fs, fs.listStatus(this.location), false); } SortedMap<FileRef,DataFileValue> allFiles = datafileManager.getDatafileSizes(); List<FileRef> inputFiles = new ArrayList<FileRef>(); if (reason == MajorCompactionReason.CHOP) { // enforce rules: files with keys outside our range need to be compacted inputFiles.addAll(findChopFiles(extent, firstAndLastKeys, allFiles.keySet())); } else if (reason == MajorCompactionReason.USER) { inputFiles.addAll(allFiles.keySet()); } else { MajorCompactionRequest request = new MajorCompactionRequest(extent, reason, fs, acuTableConf); request.setFiles(allFiles); plan = strategy.getCompactionPlan(request); if (plan != null) inputFiles.addAll(plan.inputFiles); } if (inputFiles.isEmpty()) { return majCStats; } // If no original files will exist at the end of the compaction, we do not have to propogate deletes Set<FileRef> droppedFiles = new HashSet<FileRef>(); droppedFiles.addAll(inputFiles); if (plan != null) droppedFiles.addAll(plan.deleteFiles); propogateDeletes = droppedFiles.equals(allFiles.keySet()); log.debug("Major compaction plan: " + plan + " propogate deletes : " + propogateDeletes); filesToCompact = new HashMap<FileRef,DataFileValue>(allFiles); filesToCompact.keySet().retainAll(inputFiles); t3 = System.currentTimeMillis(); datafileManager.reserveMajorCompactingFiles(filesToCompact.keySet()); } try { log.debug(String.format("MajC initiate lock %.2f secs, wait %.2f secs", (t3 - t2) / 1000.0, (t2 - t1) / 1000.0)); 
Pair<Long,List<IteratorSetting>> compactionId = null; if (!propogateDeletes) { // compacting everything, so update the compaction id in !METADATA try { compactionId = getCompactionID(); } catch (NoNodeException e) { throw new RuntimeException(e); } } List<IteratorSetting> compactionIterators = new ArrayList<IteratorSetting>(); if (compactionId != null) { if (reason == MajorCompactionReason.USER) { if (getCompactionCancelID() >= compactionId.getFirst()) { // compaction was canceled return majCStats; } synchronized (this) { if (lastCompactID >= compactionId.getFirst()) // already compacted return majCStats; } } compactionIterators = compactionId.getSecond(); } // need to handle case where only one file is being major compacted while (filesToCompact.size() > 0) { int numToCompact = maxFilesToCompact; if (filesToCompact.size() > maxFilesToCompact && filesToCompact.size() < 2 * maxFilesToCompact) { // on the second to last compaction pass, compact the minimum amount of files possible numToCompact = filesToCompact.size() - maxFilesToCompact + 1; } Set<FileRef> smallestFiles = removeSmallest(filesToCompact, numToCompact); FileRef fileName = getNextMapFilename((filesToCompact.size() == 0 && !propogateDeletes) ? 
"A" : "C"); FileRef compactTmpName = new FileRef(fileName.path().toString() + "_tmp"); AccumuloConfiguration tableConf = createTableConfiguration(acuTableConf, plan); Span span = Trace.start("compactFiles"); try { CompactionEnv cenv = new CompactionEnv() { @Override public boolean isCompactionEnabled() { return Tablet.this.isCompactionEnabled(); } @Override public IteratorScope getIteratorScope() { return IteratorScope.majc; } }; HashMap<FileRef,DataFileValue> copy = new HashMap<FileRef,DataFileValue>(datafileManager.getDatafileSizes()); if (!copy.keySet().containsAll(smallestFiles)) throw new IllegalStateException("Cannot find data file values for " + smallestFiles); copy.keySet().retainAll(smallestFiles); log.debug("Starting MajC " + extent + " (" + reason + ") " + copy.keySet() + " --> " + compactTmpName + " " + compactionIterators); // always propagate deletes, unless last batch boolean lastBatch = filesToCompact.isEmpty(); Compactor compactor = new Compactor(conf, fs, copy, null, compactTmpName, lastBatch ? propogateDeletes : true, tableConf, extent, cenv, compactionIterators, reason); CompactionStats mcs = compactor.call(); span.data("files", "" + smallestFiles.size()); span.data("read", "" + mcs.getEntriesRead()); span.data("written", "" + mcs.getEntriesWritten()); majCStats.add(mcs); if (lastBatch && plan != null && plan.deleteFiles != null) { smallestFiles.addAll(plan.deleteFiles); } datafileManager.bringMajorCompactionOnline(smallestFiles, compactTmpName, fileName, filesToCompact.size() == 0 && compactionId != null ? compactionId.getFirst() : null, new DataFileValue(mcs.getFileSize(), mcs.getEntriesWritten())); // when major compaction produces a file w/ zero entries, it will be deleted... 
do not want // to add the deleted file if (filesToCompact.size() > 0 && mcs.getEntriesWritten() > 0) { filesToCompact.put(fileName, new DataFileValue(mcs.getFileSize(), mcs.getEntriesWritten())); } } finally { span.stop(); } } return majCStats; } finally { synchronized (Tablet.this) { datafileManager.clearMajorCompactingFile(); } } }
private CompactionStats _majorCompact(MajorCompactionReason reason) throws IOException, CompactionCanceledException { long t1, t2, t3; // acquire file info outside of tablet lock CompactionStrategy strategy = Property.createInstanceFromPropertyName(acuTableConf, Property.TABLE_COMPACTION_STRATEGY, CompactionStrategy.class, new DefaultCompactionStrategy()); strategy.init(Property.getCompactionStrategyOptions(acuTableConf)); Map<FileRef,Pair<Key,Key>> firstAndLastKeys = null; if (reason == MajorCompactionReason.CHOP) { firstAndLastKeys = getFirstAndLastKeys(datafileManager.getDatafileSizes()); } else if (reason != MajorCompactionReason.USER) { MajorCompactionRequest request = new MajorCompactionRequest(extent, reason, fs, acuTableConf); request.setFiles(datafileManager.getDatafileSizes()); strategy.gatherInformation(request); } Map<FileRef, DataFileValue> filesToCompact; int maxFilesToCompact = acuTableConf.getCount(Property.TSERV_MAJC_THREAD_MAXOPEN); CompactionStats majCStats = new CompactionStats(); CompactionPlan plan = null; boolean propogateDeletes = false; synchronized (this) { // plan all that work that needs to be done in the sync block... 
then do the actual work // outside the sync block t1 = System.currentTimeMillis(); majorCompactionWaitingToStart = true; tabletMemory.waitForMinC(); t2 = System.currentTimeMillis(); majorCompactionWaitingToStart = false; notifyAll(); if (extent.isRootTablet()) { // very important that we call this before doing major compaction, // otherwise deleted compacted files could possible be brought back // at some point if the file they were compacted to was legitimately // removed by a major compaction cleanUpFiles(fs, fs.listStatus(this.location), false); } SortedMap<FileRef,DataFileValue> allFiles = datafileManager.getDatafileSizes(); List<FileRef> inputFiles = new ArrayList<FileRef>(); if (reason == MajorCompactionReason.CHOP) { // enforce rules: files with keys outside our range need to be compacted inputFiles.addAll(findChopFiles(extent, firstAndLastKeys, allFiles.keySet())); } else if (reason == MajorCompactionReason.USER) { inputFiles.addAll(allFiles.keySet()); } else { MajorCompactionRequest request = new MajorCompactionRequest(extent, reason, fs, acuTableConf); request.setFiles(allFiles); plan = strategy.getCompactionPlan(request); if (plan != null) inputFiles.addAll(plan.inputFiles); } if (inputFiles.isEmpty()) { return majCStats; } // If no original files will exist at the end of the compaction, we do not have to propogate deletes Set<FileRef> droppedFiles = new HashSet<FileRef>(); droppedFiles.addAll(inputFiles); if (plan != null) droppedFiles.addAll(plan.deleteFiles); propogateDeletes = !(droppedFiles.equals(allFiles.keySet())); log.debug("Major compaction plan: " + plan + " propogate deletes : " + propogateDeletes); filesToCompact = new HashMap<FileRef,DataFileValue>(allFiles); filesToCompact.keySet().retainAll(inputFiles); t3 = System.currentTimeMillis(); datafileManager.reserveMajorCompactingFiles(filesToCompact.keySet()); } try { log.debug(String.format("MajC initiate lock %.2f secs, wait %.2f secs", (t3 - t2) / 1000.0, (t2 - t1) / 1000.0)); 
Pair<Long,List<IteratorSetting>> compactionId = null; if (!propogateDeletes) { // compacting everything, so update the compaction id in !METADATA try { compactionId = getCompactionID(); } catch (NoNodeException e) { throw new RuntimeException(e); } } List<IteratorSetting> compactionIterators = new ArrayList<IteratorSetting>(); if (compactionId != null) { if (reason == MajorCompactionReason.USER) { if (getCompactionCancelID() >= compactionId.getFirst()) { // compaction was canceled return majCStats; } synchronized (this) { if (lastCompactID >= compactionId.getFirst()) // already compacted return majCStats; } } compactionIterators = compactionId.getSecond(); } // need to handle case where only one file is being major compacted while (filesToCompact.size() > 0) { int numToCompact = maxFilesToCompact; if (filesToCompact.size() > maxFilesToCompact && filesToCompact.size() < 2 * maxFilesToCompact) { // on the second to last compaction pass, compact the minimum amount of files possible numToCompact = filesToCompact.size() - maxFilesToCompact + 1; } Set<FileRef> smallestFiles = removeSmallest(filesToCompact, numToCompact); FileRef fileName = getNextMapFilename((filesToCompact.size() == 0 && !propogateDeletes) ? 
"A" : "C"); FileRef compactTmpName = new FileRef(fileName.path().toString() + "_tmp"); AccumuloConfiguration tableConf = createTableConfiguration(acuTableConf, plan); Span span = Trace.start("compactFiles"); try { CompactionEnv cenv = new CompactionEnv() { @Override public boolean isCompactionEnabled() { return Tablet.this.isCompactionEnabled(); } @Override public IteratorScope getIteratorScope() { return IteratorScope.majc; } }; HashMap<FileRef,DataFileValue> copy = new HashMap<FileRef,DataFileValue>(datafileManager.getDatafileSizes()); if (!copy.keySet().containsAll(smallestFiles)) throw new IllegalStateException("Cannot find data file values for " + smallestFiles); copy.keySet().retainAll(smallestFiles); log.debug("Starting MajC " + extent + " (" + reason + ") " + copy.keySet() + " --> " + compactTmpName + " " + compactionIterators); // always propagate deletes, unless last batch boolean lastBatch = filesToCompact.isEmpty(); Compactor compactor = new Compactor(conf, fs, copy, null, compactTmpName, lastBatch ? propogateDeletes : true, tableConf, extent, cenv, compactionIterators, reason); CompactionStats mcs = compactor.call(); span.data("files", "" + smallestFiles.size()); span.data("read", "" + mcs.getEntriesRead()); span.data("written", "" + mcs.getEntriesWritten()); majCStats.add(mcs); if (lastBatch && plan != null && plan.deleteFiles != null) { smallestFiles.addAll(plan.deleteFiles); } datafileManager.bringMajorCompactionOnline(smallestFiles, compactTmpName, fileName, filesToCompact.size() == 0 && compactionId != null ? compactionId.getFirst() : null, new DataFileValue(mcs.getFileSize(), mcs.getEntriesWritten())); // when major compaction produces a file w/ zero entries, it will be deleted... 
do not want // to add the deleted file if (filesToCompact.size() > 0 && mcs.getEntriesWritten() > 0) { filesToCompact.put(fileName, new DataFileValue(mcs.getFileSize(), mcs.getEntriesWritten())); } } finally { span.stop(); } } return majCStats; } finally { synchronized (Tablet.this) { datafileManager.clearMajorCompactingFile(); } } }
diff --git a/src/info/ata4/unity/assetbundle/AssetBundleWriter.java b/src/info/ata4/unity/assetbundle/AssetBundleWriter.java index e9d973d..bd8933e 100644 --- a/src/info/ata4/unity/assetbundle/AssetBundleWriter.java +++ b/src/info/ata4/unity/assetbundle/AssetBundleWriter.java @@ -1,165 +1,165 @@ /* ** 2014 December 03 ** ** The author disclaims copyright to this source code. In place of ** a legal notice, here is a blessing: ** May you do good and not evil. ** May you find forgiveness for yourself and forgive others. ** May you share freely, never taking more than you give. */ package info.ata4.unity.assetbundle; import info.ata4.io.DataWriter; import info.ata4.io.socket.Sockets; import java.io.BufferedOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.file.Files; import java.nio.file.Path; import static java.nio.file.StandardOpenOption.*; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import info.ata4.io.lzma.LzmaEncoderProps; import net.contrapunctus.lzma.LzmaOutputStream; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.tuple.MutablePair; /** * * @author Nico Bergemann <barracuda415 at yahoo.de> */ public class AssetBundleWriter { private final AssetBundleHeader header = new AssetBundleHeader(); private final List<AssetBundleEntry> entries = new ArrayList<>(); private final Map<AssetBundleEntry, MutablePair<Long, Long>> levelOffsetMap = new LinkedHashMap<>(); public AssetBundleHeader getHeader() { return header; } public void addEntry(AssetBundleEntry entry) { entries.add(entry); } public void clearEntries() { entries.clear(); } public void write(Path file) throws IOException { // add offset placeholders levelOffsetMap.clear(); for (AssetBundleEntry entry : entries) { String name = entry.getName(); if (name.equals("mainData") || name.startsWith("level") || entries.size() == 1) { levelOffsetMap.put(entry, new 
MutablePair<>(0L, 0L)); } } header.getLevelOffsets().clear(); header.getLevelOffsets().addAll(levelOffsetMap.values()); header.setNumberOfLevels(levelOffsetMap.size()); try (DataWriter out = new DataWriter(Sockets.forFile(file, CREATE, WRITE, TRUNCATE_EXISTING))) { // write header header.write(out); header.setHeaderSize((int) out.position()); // write bundle data if (header.isCompressed()) { // write data to temporary file Path dataFile = Files.createTempFile(file.getParent(), "uncompressedData", null); try (DataWriter outData = new DataWriter(Sockets.forFile(dataFile, CREATE, READ, WRITE, TRUNCATE_EXISTING, DELETE_ON_CLOSE))) { writeData(outData); // configure LZMA encoder LzmaEncoderProps props = new LzmaEncoderProps(); - props.setDictionarySize(1 << 19); - props.setNumFastBytes(273); + props.setDictionarySize(1 << 23); // 8 MiB + props.setNumFastBytes(273); // maximum props.setUncompressedSize(outData.size()); props.setEndMarkerMode(true); // stream the temporary bundle data compressed into the bundle file outData.position(0); try ( InputStream is = outData.getSocket().getInputStream(); OutputStream os = new LzmaOutputStream(new BufferedOutputStream( out.getSocket().getOutputStream()), props); ) { IOUtils.copy(is, os); } } for (MutablePair<Long, Long> levelOffset : levelOffsetMap.values()) { levelOffset.setLeft(out.size()); } } else { // write data directly to file writeData(out); } // update header int fileSize = (int) out.size(); header.setCompleteFileSize(fileSize); header.setMinimumStreamedBytes(fileSize); out.position(0); out.writeStruct(header); } } private void writeData(DataWriter out) throws IOException { // write entry list long baseOffset = out.position(); out.writeInt(entries.size()); List<AssetBundleEntryInfo> entryInfos = new ArrayList<>(entries.size()); for (AssetBundleEntry entry : entries) { AssetBundleEntryInfo entryInfo = new AssetBundleEntryInfo(); entryInfo.setName(entry.getName()); entryInfo.setSize(entry.getSize()); entryInfo.write(out); 
entryInfos.add(entryInfo); } // write entry data for (int i = 0; i < entries.size(); i++) { out.align(4); AssetBundleEntry entry = entries.get(i); AssetBundleEntryInfo entryInfo = entryInfos.get(i); entryInfo.setOffset(out.position() - baseOffset); if (i == 0) { header.setDataHeaderSize(entryInfo.getOffset()); } try ( InputStream is = entry.getInputStream(); OutputStream os = out.getSocket().getOutputStream(); ) { IOUtils.copy(is, os); } MutablePair<Long, Long> levelOffset = levelOffsetMap.get(entry); if (levelOffset != null) { long offset = out.position() - baseOffset; levelOffset.setLeft(offset); levelOffset.setRight(offset); } } // update offsets out.position(baseOffset + 4); for (AssetBundleEntryInfo entryInfo : entryInfos) { entryInfo.write(out); } } }
true
true
public void write(Path file) throws IOException { // add offset placeholders levelOffsetMap.clear(); for (AssetBundleEntry entry : entries) { String name = entry.getName(); if (name.equals("mainData") || name.startsWith("level") || entries.size() == 1) { levelOffsetMap.put(entry, new MutablePair<>(0L, 0L)); } } header.getLevelOffsets().clear(); header.getLevelOffsets().addAll(levelOffsetMap.values()); header.setNumberOfLevels(levelOffsetMap.size()); try (DataWriter out = new DataWriter(Sockets.forFile(file, CREATE, WRITE, TRUNCATE_EXISTING))) { // write header header.write(out); header.setHeaderSize((int) out.position()); // write bundle data if (header.isCompressed()) { // write data to temporary file Path dataFile = Files.createTempFile(file.getParent(), "uncompressedData", null); try (DataWriter outData = new DataWriter(Sockets.forFile(dataFile, CREATE, READ, WRITE, TRUNCATE_EXISTING, DELETE_ON_CLOSE))) { writeData(outData); // configure LZMA encoder LzmaEncoderProps props = new LzmaEncoderProps(); props.setDictionarySize(1 << 19); props.setNumFastBytes(273); props.setUncompressedSize(outData.size()); props.setEndMarkerMode(true); // stream the temporary bundle data compressed into the bundle file outData.position(0); try ( InputStream is = outData.getSocket().getInputStream(); OutputStream os = new LzmaOutputStream(new BufferedOutputStream( out.getSocket().getOutputStream()), props); ) { IOUtils.copy(is, os); } } for (MutablePair<Long, Long> levelOffset : levelOffsetMap.values()) { levelOffset.setLeft(out.size()); } } else { // write data directly to file writeData(out); } // update header int fileSize = (int) out.size(); header.setCompleteFileSize(fileSize); header.setMinimumStreamedBytes(fileSize); out.position(0); out.writeStruct(header); } }
public void write(Path file) throws IOException { // add offset placeholders levelOffsetMap.clear(); for (AssetBundleEntry entry : entries) { String name = entry.getName(); if (name.equals("mainData") || name.startsWith("level") || entries.size() == 1) { levelOffsetMap.put(entry, new MutablePair<>(0L, 0L)); } } header.getLevelOffsets().clear(); header.getLevelOffsets().addAll(levelOffsetMap.values()); header.setNumberOfLevels(levelOffsetMap.size()); try (DataWriter out = new DataWriter(Sockets.forFile(file, CREATE, WRITE, TRUNCATE_EXISTING))) { // write header header.write(out); header.setHeaderSize((int) out.position()); // write bundle data if (header.isCompressed()) { // write data to temporary file Path dataFile = Files.createTempFile(file.getParent(), "uncompressedData", null); try (DataWriter outData = new DataWriter(Sockets.forFile(dataFile, CREATE, READ, WRITE, TRUNCATE_EXISTING, DELETE_ON_CLOSE))) { writeData(outData); // configure LZMA encoder LzmaEncoderProps props = new LzmaEncoderProps(); props.setDictionarySize(1 << 23); // 8 MiB props.setNumFastBytes(273); // maximum props.setUncompressedSize(outData.size()); props.setEndMarkerMode(true); // stream the temporary bundle data compressed into the bundle file outData.position(0); try ( InputStream is = outData.getSocket().getInputStream(); OutputStream os = new LzmaOutputStream(new BufferedOutputStream( out.getSocket().getOutputStream()), props); ) { IOUtils.copy(is, os); } } for (MutablePair<Long, Long> levelOffset : levelOffsetMap.values()) { levelOffset.setLeft(out.size()); } } else { // write data directly to file writeData(out); } // update header int fileSize = (int) out.size(); header.setCompleteFileSize(fileSize); header.setMinimumStreamedBytes(fileSize); out.position(0); out.writeStruct(header); } }
diff --git a/src/com/android/launcher2/DragLayer.java b/src/com/android/launcher2/DragLayer.java index 2e72f622..eb539450 100644 --- a/src/com/android/launcher2/DragLayer.java +++ b/src/com/android/launcher2/DragLayer.java @@ -1,104 +1,106 @@ /* * Copyright (C) 2008 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.launcher2; import com.android.launcher.R; import android.content.Context; import android.graphics.Bitmap; import android.util.AttributeSet; import android.view.KeyEvent; import android.view.MotionEvent; import android.view.View; import android.widget.FrameLayout; import android.widget.ImageView; /** * A ViewGroup that coordinates dragging across its descendants */ public class DragLayer extends FrameLayout { private DragController mDragController; private int[] mTmpXY = new int[2]; /** * Used to create a new DragLayer from XML. * * @param context The application's context. * @param attrs The attributes set containing the Workspace's customization values. 
*/ public DragLayer(Context context, AttributeSet attrs) { super(context, attrs); // Disable multitouch across the workspace/all apps/customize tray setMotionEventSplittingEnabled(false); } public void setDragController(DragController controller) { mDragController = controller; } @Override public boolean dispatchKeyEvent(KeyEvent event) { return mDragController.dispatchKeyEvent(event) || super.dispatchKeyEvent(event); } @Override public boolean onInterceptTouchEvent(MotionEvent ev) { // If the current CellLayoutChildren has a resize frame, we need to detect if any touch // event has occurred which doesn't result in resizing a widget. In this case, we // dismiss any visible resize frames. final Workspace w = (Workspace) findViewById(R.id.workspace); - final CellLayout currentPage = (CellLayout) w.getChildAt(w.getCurrentPage()); - final CellLayoutChildren childrenLayout = currentPage.getChildrenLayout(); + if (w != null) { + final CellLayout currentPage = (CellLayout) w.getChildAt(w.getCurrentPage()); + final CellLayoutChildren childrenLayout = currentPage.getChildrenLayout(); - if (childrenLayout.hasResizeFrames() && !childrenLayout.isWidgetBeingResized()) { - post(new Runnable() { - public void run() { - if (!childrenLayout.isWidgetBeingResized()) { - childrenLayout.clearAllResizeFrames(); + if (childrenLayout.hasResizeFrames() && !childrenLayout.isWidgetBeingResized()) { + post(new Runnable() { + public void run() { + if (!childrenLayout.isWidgetBeingResized()) { + childrenLayout.clearAllResizeFrames(); + } } - } - }); + }); + } } return mDragController.onInterceptTouchEvent(ev); } @Override public boolean onTouchEvent(MotionEvent ev) { return mDragController.onTouchEvent(ev); } @Override public boolean dispatchUnhandledMove(View focused, int direction) { return mDragController.dispatchUnhandledMove(focused, direction); } public View createDragView(Bitmap b, int xPos, int yPos) { ImageView imageView = new ImageView(mContext); imageView.setImageBitmap(b); 
imageView.setX(xPos); imageView.setY(yPos); addView(imageView, b.getWidth(), b.getHeight()); return imageView; } public View createDragView(View v) { v.getLocationOnScreen(mTmpXY); return createDragView(mDragController.getViewBitmap(v), mTmpXY[0], mTmpXY[1]); } }
false
true
public boolean onInterceptTouchEvent(MotionEvent ev) { // If the current CellLayoutChildren has a resize frame, we need to detect if any touch // event has occurred which doesn't result in resizing a widget. In this case, we // dismiss any visible resize frames. final Workspace w = (Workspace) findViewById(R.id.workspace); final CellLayout currentPage = (CellLayout) w.getChildAt(w.getCurrentPage()); final CellLayoutChildren childrenLayout = currentPage.getChildrenLayout(); if (childrenLayout.hasResizeFrames() && !childrenLayout.isWidgetBeingResized()) { post(new Runnable() { public void run() { if (!childrenLayout.isWidgetBeingResized()) { childrenLayout.clearAllResizeFrames(); } } }); } return mDragController.onInterceptTouchEvent(ev); }
public boolean onInterceptTouchEvent(MotionEvent ev) { // If the current CellLayoutChildren has a resize frame, we need to detect if any touch // event has occurred which doesn't result in resizing a widget. In this case, we // dismiss any visible resize frames. final Workspace w = (Workspace) findViewById(R.id.workspace); if (w != null) { final CellLayout currentPage = (CellLayout) w.getChildAt(w.getCurrentPage()); final CellLayoutChildren childrenLayout = currentPage.getChildrenLayout(); if (childrenLayout.hasResizeFrames() && !childrenLayout.isWidgetBeingResized()) { post(new Runnable() { public void run() { if (!childrenLayout.isWidgetBeingResized()) { childrenLayout.clearAllResizeFrames(); } } }); } } return mDragController.onInterceptTouchEvent(ev); }
diff --git a/src/main/java/org/jenkinsci/plugins/maveninvoker/MavenInvokerArchiver.java b/src/main/java/org/jenkinsci/plugins/maveninvoker/MavenInvokerArchiver.java index 8b40ebb..c9137db 100644 --- a/src/main/java/org/jenkinsci/plugins/maveninvoker/MavenInvokerArchiver.java +++ b/src/main/java/org/jenkinsci/plugins/maveninvoker/MavenInvokerArchiver.java @@ -1,171 +1,174 @@ package org.jenkinsci.plugins.maveninvoker; /* * Copyright (c) Olivier Lamy * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. 
*/ import hudson.Extension; import hudson.FilePath; import hudson.maven.MavenBuild; import hudson.maven.MavenBuildProxy; import hudson.maven.MavenModule; import hudson.maven.MavenReporter; import hudson.maven.MavenReporterDescriptor; import hudson.maven.MojoInfo; import hudson.model.BuildListener; import hudson.model.Result; import org.apache.commons.io.IOUtils; import org.apache.maven.plugin.invoker.model.BuildJob; import org.apache.maven.plugin.invoker.model.io.xpp3.BuildJobXpp3Reader; import org.apache.maven.project.MavenProject; import org.codehaus.plexus.component.configurator.ComponentConfigurationException; import org.codehaus.plexus.util.xml.pull.XmlPullParserException; import org.jenkinsci.plugins.maveninvoker.results.MavenInvokerResult; import org.jenkinsci.plugins.maveninvoker.results.MavenInvokerResults; import java.io.File; import java.io.FileInputStream; import java.io.FilenameFilter; import java.io.IOException; import java.io.InputStream; import java.io.PrintStream; import java.util.Arrays; /** * @author Olivier Lamy * @since 1.1 */ public class MavenInvokerArchiver extends MavenReporter { @Override public boolean postExecute( MavenBuildProxy build, MavenProject pom, MojoInfo mojo, BuildListener listener, Throwable error ) throws InterruptedException, IOException { if ( !mojo.is( "org.apache.maven.plugins", "maven-invoker-plugin", "run" ) && !mojo.is( "org.apache.maven.plugins", "maven-invoker-plugin", "integration-test" ) ) { return true; } + final String buildDirectory = new File( pom.getBuild().getDirectory() ).getName(); final PrintStream logger = listener.getLogger(); logger.println( "MavenInvokerArchiver" ); File[] reports = new File[0]; try { final File reportsDir = mojo.getConfigurationValue( "reportsDirectory", File.class ); reports = reportsDir.listFiles( new FilenameFilter() { public boolean accept( File file, String s ) { return s.startsWith( "BUILD" ); } } ); logger.println( "found reports:" + Arrays.asList( reports ) ); final 
BuildJobXpp3Reader reader = new BuildJobXpp3Reader(); final MavenInvokerResults mavenInvokerResults = new MavenInvokerResults(); // TODO // saveReports for ( File f : reports ) { InputStream is = new FileInputStream( f ); try { BuildJob buildJob = reader.read( is ); MavenInvokerResult mavenInvokerResult = MavenInvokerRecorder.map( buildJob ); logger.println( "mavenInvokerResult:" + mavenInvokerResult ); mavenInvokerResults.mavenInvokerResults.add( mavenInvokerResult ); } catch ( XmlPullParserException e ) { e.printStackTrace( listener.fatalError( "failed to parse report" ) ); build.setResult( Result.FAILURE ); return true; } finally { IOUtils.closeQuietly( is ); } } int failedCount = build.execute( new MavenBuildProxy.BuildCallable<Integer, IOException>() { private static final long serialVersionUID = 1L; public Integer call( MavenBuild build ) throws IOException, IOException, InterruptedException { - FilePath[] reportsPaths = - MavenInvokerRecorder.locateReports( build.getWorkspace(), reportsDir.getName() + "BUILD*.xml" ); + FilePath[] reportsPaths = MavenInvokerRecorder.locateReports( build.getWorkspace(), + buildDirectory + "/" + + reportsDir.getName() + + "/BUILD*.xml" ); FilePath backupDirectory = MavenInvokerRecorder.getMavenInvokerReportsDirectory( build ); MavenInvokerRecorder.saveReports( backupDirectory, reportsPaths ); InvokerReport invokerReport = new InvokerReport( build, mavenInvokerResults ); build.getActions().add( invokerReport ); int failed = invokerReport.getFailedTestCount(); return failed; } } ); return true; } catch ( ComponentConfigurationException e ) { e.printStackTrace( listener.fatalError( "failed to find report directory" ) ); build.setResult( Result.FAILURE ); return true; } } @Extension public static final class DescriptorImpl extends MavenReporterDescriptor { @Override public String getDisplayName() { // FIXME i18n return "Maven Invoker Plugin Results"; } @Override public MavenReporter newAutoInstance( MavenModule module ) { return 
new MavenInvokerArchiver(); } } private static final long serialVersionUID = 1L; }
false
true
public boolean postExecute( MavenBuildProxy build, MavenProject pom, MojoInfo mojo, BuildListener listener, Throwable error ) throws InterruptedException, IOException { if ( !mojo.is( "org.apache.maven.plugins", "maven-invoker-plugin", "run" ) && !mojo.is( "org.apache.maven.plugins", "maven-invoker-plugin", "integration-test" ) ) { return true; } final PrintStream logger = listener.getLogger(); logger.println( "MavenInvokerArchiver" ); File[] reports = new File[0]; try { final File reportsDir = mojo.getConfigurationValue( "reportsDirectory", File.class ); reports = reportsDir.listFiles( new FilenameFilter() { public boolean accept( File file, String s ) { return s.startsWith( "BUILD" ); } } ); logger.println( "found reports:" + Arrays.asList( reports ) ); final BuildJobXpp3Reader reader = new BuildJobXpp3Reader(); final MavenInvokerResults mavenInvokerResults = new MavenInvokerResults(); // TODO // saveReports for ( File f : reports ) { InputStream is = new FileInputStream( f ); try { BuildJob buildJob = reader.read( is ); MavenInvokerResult mavenInvokerResult = MavenInvokerRecorder.map( buildJob ); logger.println( "mavenInvokerResult:" + mavenInvokerResult ); mavenInvokerResults.mavenInvokerResults.add( mavenInvokerResult ); } catch ( XmlPullParserException e ) { e.printStackTrace( listener.fatalError( "failed to parse report" ) ); build.setResult( Result.FAILURE ); return true; } finally { IOUtils.closeQuietly( is ); } } int failedCount = build.execute( new MavenBuildProxy.BuildCallable<Integer, IOException>() { private static final long serialVersionUID = 1L; public Integer call( MavenBuild build ) throws IOException, IOException, InterruptedException { FilePath[] reportsPaths = MavenInvokerRecorder.locateReports( build.getWorkspace(), reportsDir.getName() + "BUILD*.xml" ); FilePath backupDirectory = MavenInvokerRecorder.getMavenInvokerReportsDirectory( build ); MavenInvokerRecorder.saveReports( backupDirectory, reportsPaths ); InvokerReport invokerReport = new 
InvokerReport( build, mavenInvokerResults ); build.getActions().add( invokerReport ); int failed = invokerReport.getFailedTestCount(); return failed; } } ); return true; } catch ( ComponentConfigurationException e ) { e.printStackTrace( listener.fatalError( "failed to find report directory" ) ); build.setResult( Result.FAILURE ); return true; } }
public boolean postExecute( MavenBuildProxy build, MavenProject pom, MojoInfo mojo, BuildListener listener, Throwable error ) throws InterruptedException, IOException { if ( !mojo.is( "org.apache.maven.plugins", "maven-invoker-plugin", "run" ) && !mojo.is( "org.apache.maven.plugins", "maven-invoker-plugin", "integration-test" ) ) { return true; } final String buildDirectory = new File( pom.getBuild().getDirectory() ).getName(); final PrintStream logger = listener.getLogger(); logger.println( "MavenInvokerArchiver" ); File[] reports = new File[0]; try { final File reportsDir = mojo.getConfigurationValue( "reportsDirectory", File.class ); reports = reportsDir.listFiles( new FilenameFilter() { public boolean accept( File file, String s ) { return s.startsWith( "BUILD" ); } } ); logger.println( "found reports:" + Arrays.asList( reports ) ); final BuildJobXpp3Reader reader = new BuildJobXpp3Reader(); final MavenInvokerResults mavenInvokerResults = new MavenInvokerResults(); // TODO // saveReports for ( File f : reports ) { InputStream is = new FileInputStream( f ); try { BuildJob buildJob = reader.read( is ); MavenInvokerResult mavenInvokerResult = MavenInvokerRecorder.map( buildJob ); logger.println( "mavenInvokerResult:" + mavenInvokerResult ); mavenInvokerResults.mavenInvokerResults.add( mavenInvokerResult ); } catch ( XmlPullParserException e ) { e.printStackTrace( listener.fatalError( "failed to parse report" ) ); build.setResult( Result.FAILURE ); return true; } finally { IOUtils.closeQuietly( is ); } } int failedCount = build.execute( new MavenBuildProxy.BuildCallable<Integer, IOException>() { private static final long serialVersionUID = 1L; public Integer call( MavenBuild build ) throws IOException, IOException, InterruptedException { FilePath[] reportsPaths = MavenInvokerRecorder.locateReports( build.getWorkspace(), buildDirectory + "/" + reportsDir.getName() + "/BUILD*.xml" ); FilePath backupDirectory = MavenInvokerRecorder.getMavenInvokerReportsDirectory( 
build ); MavenInvokerRecorder.saveReports( backupDirectory, reportsPaths ); InvokerReport invokerReport = new InvokerReport( build, mavenInvokerResults ); build.getActions().add( invokerReport ); int failed = invokerReport.getFailedTestCount(); return failed; } } ); return true; } catch ( ComponentConfigurationException e ) { e.printStackTrace( listener.fatalError( "failed to find report directory" ) ); build.setResult( Result.FAILURE ); return true; } }
diff --git a/integration-tests/src/test/java/org/torquebox/integration/arquillian/rails2/FrozenTest.java b/integration-tests/src/test/java/org/torquebox/integration/arquillian/rails2/FrozenTest.java index 55274f7a7..0e9125f8a 100644 --- a/integration-tests/src/test/java/org/torquebox/integration/arquillian/rails2/FrozenTest.java +++ b/integration-tests/src/test/java/org/torquebox/integration/arquillian/rails2/FrozenTest.java @@ -1,72 +1,72 @@ /* * Copyright 2008-2011 Red Hat, Inc, and individual contributors. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. 
*/ package org.torquebox.integration.arquillian.rails2; import static org.junit.Assert.*; import java.util.List; import org.jboss.arquillian.api.Deployment; import org.jboss.arquillian.api.Run; import org.jboss.arquillian.api.RunModeType; import org.jboss.shrinkwrap.api.spec.JavaArchive; import org.junit.Test; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.torquebox.integration.arquillian.AbstractIntegrationTestCase; @Run(RunModeType.AS_CLIENT) public class FrozenTest extends AbstractIntegrationTestCase { private static final String[] GEM_NAMES = { "railties", "activesupport", "actionpack", "activerecord", "actionmailer", "activeresource", }; @Deployment public static JavaArchive createDeployment() { return createDeployment( "rails2/frozen-knob.yml" ); } @Test public void testHighLevel() { driver.get( "http://localhost:8080/frozen-rails" ); - // System.err.println("RESULT: "); - // System.err.println(driver.getPageSource()); + //System.err.println("RESULT: "); + //System.err.println(driver.getPageSource()); WebElement element = driver.findElementById( "success" ); assertNotNull( element ); assertEquals( "frozen-rails", element.getAttribute( "class" ) ); List<WebElement> elements = driver.findElements( By.className( "load_path_element" ) ); for (WebElement each : elements) { String pathElement = each.getText(); // Ensure that the mentioned gems are loaded absolutely from our // frozen // vendored Rails, and not from system gems. Inspect the paths for // known elements that indicate frozenness. for (int i = 0; i < GEM_NAMES.length; ++i) { if (pathElement.contains( "/" + GEM_NAMES[i] + "/lib" )) { String regex = "^.*frozen.*vendor/rails/" + GEM_NAMES[i] + "/lib.*$"; assert (pathElement.matches( regex ) ); } } } } }
true
true
public void testHighLevel() { driver.get( "http://localhost:8080/frozen-rails" ); // System.err.println("RESULT: "); // System.err.println(driver.getPageSource()); WebElement element = driver.findElementById( "success" ); assertNotNull( element ); assertEquals( "frozen-rails", element.getAttribute( "class" ) ); List<WebElement> elements = driver.findElements( By.className( "load_path_element" ) ); for (WebElement each : elements) { String pathElement = each.getText(); // Ensure that the mentioned gems are loaded absolutely from our // frozen // vendored Rails, and not from system gems. Inspect the paths for // known elements that indicate frozenness. for (int i = 0; i < GEM_NAMES.length; ++i) { if (pathElement.contains( "/" + GEM_NAMES[i] + "/lib" )) { String regex = "^.*frozen.*vendor/rails/" + GEM_NAMES[i] + "/lib.*$"; assert (pathElement.matches( regex ) ); } } } }
public void testHighLevel() { driver.get( "http://localhost:8080/frozen-rails" ); //System.err.println("RESULT: "); //System.err.println(driver.getPageSource()); WebElement element = driver.findElementById( "success" ); assertNotNull( element ); assertEquals( "frozen-rails", element.getAttribute( "class" ) ); List<WebElement> elements = driver.findElements( By.className( "load_path_element" ) ); for (WebElement each : elements) { String pathElement = each.getText(); // Ensure that the mentioned gems are loaded absolutely from our // frozen // vendored Rails, and not from system gems. Inspect the paths for // known elements that indicate frozenness. for (int i = 0; i < GEM_NAMES.length; ++i) { if (pathElement.contains( "/" + GEM_NAMES[i] + "/lib" )) { String regex = "^.*frozen.*vendor/rails/" + GEM_NAMES[i] + "/lib.*$"; assert (pathElement.matches( regex ) ); } } } }
diff --git a/src/com/android/settings/UserDictionarySettings.java b/src/com/android/settings/UserDictionarySettings.java index 6ffcb3d6..49363a90 100644 --- a/src/com/android/settings/UserDictionarySettings.java +++ b/src/com/android/settings/UserDictionarySettings.java @@ -1,286 +1,286 @@ /** * Copyright (C) 2009 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.android.settings; import android.app.AlertDialog; import android.app.Dialog; import android.app.ListActivity; import android.content.Context; import android.content.DialogInterface; import android.database.Cursor; import android.os.Bundle; import android.provider.UserDictionary; import android.text.InputType; import android.view.ContextMenu; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.WindowManager; import android.view.ContextMenu.ContextMenuInfo; import android.widget.AlphabetIndexer; import android.widget.EditText; import android.widget.ListAdapter; import android.widget.ListView; import android.widget.SectionIndexer; import android.widget.SimpleCursorAdapter; import android.widget.TextView; import android.widget.AdapterView.AdapterContextMenuInfo; import java.util.Locale; public class UserDictionarySettings extends ListActivity { private static final String INSTANCE_KEY_DIALOG_EDITING_WORD = "DIALOG_EDITING_WORD"; private static final String INSTANCE_KEY_ADDED_WORD = "DIALOG_ADDED_WORD"; private static final String[] QUERY_PROJECTION = { 
UserDictionary.Words._ID, UserDictionary.Words.WORD }; // Either the locale is empty (means the word is applicable to all locales) // or the word equals our current locale private static final String QUERY_SELECTION = UserDictionary.Words.LOCALE + "=? OR " + UserDictionary.Words.LOCALE + " is null"; private static final String DELETE_SELECTION = UserDictionary.Words.WORD + "=?"; private static final String EXTRA_WORD = "word"; private static final int CONTEXT_MENU_EDIT = Menu.FIRST; private static final int CONTEXT_MENU_DELETE = Menu.FIRST + 1; private static final int OPTIONS_MENU_ADD = Menu.FIRST; private static final int DIALOG_ADD_OR_EDIT = 0; /** The word being edited in the dialog (null means the user is adding a word). */ private String mDialogEditingWord; private Cursor mCursor; private boolean mAddedWordAlready; private boolean mAutoReturn; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.list_content_with_empty_view); mCursor = createCursor(); setListAdapter(createAdapter()); TextView emptyView = (TextView) findViewById(R.id.empty); emptyView.setText(R.string.user_dict_settings_empty_text); ListView listView = getListView(); listView.setFastScrollEnabled(true); listView.setEmptyView(emptyView); registerForContextMenu(listView); } @Override protected void onResume() { super.onResume(); if (!mAddedWordAlready && getIntent().getAction().equals("com.android.settings.USER_DICTIONARY_INSERT")) { String word = getIntent().getStringExtra(EXTRA_WORD); mAutoReturn = true; if (word != null) { showAddOrEditDialog(word); } } } @Override protected void onRestoreInstanceState(Bundle state) { super.onRestoreInstanceState(state); mDialogEditingWord = state.getString(INSTANCE_KEY_DIALOG_EDITING_WORD); mAddedWordAlready = state.getBoolean(INSTANCE_KEY_ADDED_WORD, false); } @Override protected void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); 
outState.putString(INSTANCE_KEY_DIALOG_EDITING_WORD, mDialogEditingWord); outState.putBoolean(INSTANCE_KEY_ADDED_WORD, mAddedWordAlready); } private Cursor createCursor() { String currentLocale = Locale.getDefault().toString(); // Case-insensitive sort return managedQuery(UserDictionary.Words.CONTENT_URI, QUERY_PROJECTION, QUERY_SELECTION, new String[] { currentLocale }, "UPPER(" + UserDictionary.Words.WORD + ")"); } private ListAdapter createAdapter() { return new MyAdapter(this, android.R.layout.simple_list_item_1, mCursor, new String[] { UserDictionary.Words.WORD }, new int[] { android.R.id.text1 }); } @Override protected void onListItemClick(ListView l, View v, int position, long id) { openContextMenu(v); } @Override public void onCreateContextMenu(ContextMenu menu, View v, ContextMenuInfo menuInfo) { if (!(menuInfo instanceof AdapterContextMenuInfo)) return; AdapterContextMenuInfo adapterMenuInfo = (AdapterContextMenuInfo) menuInfo; menu.setHeaderTitle(getWord(adapterMenuInfo.position)); menu.add(0, CONTEXT_MENU_EDIT, 0, R.string.user_dict_settings_context_menu_edit_title); menu.add(0, CONTEXT_MENU_DELETE, 0, R.string.user_dict_settings_context_menu_delete_title); } @Override public boolean onContextItemSelected(MenuItem item) { ContextMenuInfo menuInfo = item.getMenuInfo(); if (!(menuInfo instanceof AdapterContextMenuInfo)) return false; AdapterContextMenuInfo adapterMenuInfo = (AdapterContextMenuInfo) menuInfo; String word = getWord(adapterMenuInfo.position); if (word == null) return true; switch (item.getItemId()) { case CONTEXT_MENU_DELETE: deleteWord(word); return true; case CONTEXT_MENU_EDIT: showAddOrEditDialog(word); return true; } return false; } @Override public boolean onCreateOptionsMenu(Menu menu) { menu.add(0, OPTIONS_MENU_ADD, 0, R.string.user_dict_settings_add_menu_title) .setIcon(R.drawable.ic_menu_add); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { showAddOrEditDialog(null); return true; } private void 
showAddOrEditDialog(String editingWord) { mDialogEditingWord = editingWord; showDialog(DIALOG_ADD_OR_EDIT); } private String getWord(int position) { mCursor.moveToPosition(position); // Handle a possible race-condition if (mCursor.isAfterLast()) return null; return mCursor.getString( mCursor.getColumnIndexOrThrow(UserDictionary.Words.WORD)); } @Override protected Dialog onCreateDialog(int id) { View content = getLayoutInflater().inflate(R.layout.dialog_edittext, null); final EditText editText = (EditText) content.findViewById(R.id.edittext); // No prediction in soft keyboard mode. TODO: Create a better way to disable prediction editText.setInputType(InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_FLAG_AUTO_COMPLETE); AlertDialog dialog = new AlertDialog.Builder(this) .setTitle(mDialogEditingWord != null ? R.string.user_dict_settings_edit_dialog_title : R.string.user_dict_settings_add_dialog_title) .setView(content) .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { onAddOrEditFinished(editText.getText().toString()); if (mAutoReturn) finish(); }}) .setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { if (mAutoReturn) finish(); }}) .create(); - dialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_ADJUST_RESIZE | + dialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_ADJUST_PAN | WindowManager.LayoutParams.SOFT_INPUT_STATE_VISIBLE); return dialog; } @Override protected void onPrepareDialog(int id, Dialog d) { AlertDialog dialog = (AlertDialog) d; d.setTitle(mDialogEditingWord != null ? 
R.string.user_dict_settings_edit_dialog_title : R.string.user_dict_settings_add_dialog_title); EditText editText = (EditText) dialog.findViewById(R.id.edittext); editText.setText(mDialogEditingWord); } private void onAddOrEditFinished(String word) { if (mDialogEditingWord != null) { // The user was editing a word, so do a delete/add deleteWord(mDialogEditingWord); } // Disallow duplicates deleteWord(word); // TODO: present UI for picking whether to add word to all locales, or current. UserDictionary.Words.addWord(this, word.toString(), 250, UserDictionary.Words.LOCALE_TYPE_ALL); mCursor.requery(); mAddedWordAlready = true; } private void deleteWord(String word) { getContentResolver().delete(UserDictionary.Words.CONTENT_URI, DELETE_SELECTION, new String[] { word }); } private static class MyAdapter extends SimpleCursorAdapter implements SectionIndexer { private AlphabetIndexer mIndexer; public MyAdapter(Context context, int layout, Cursor c, String[] from, int[] to) { super(context, layout, c, from, to); int wordColIndex = c.getColumnIndexOrThrow(UserDictionary.Words.WORD); String alphabet = context.getString(com.android.internal.R.string.fast_scroll_alphabet); mIndexer = new AlphabetIndexer(c, wordColIndex, alphabet); } public int getPositionForSection(int section) { return mIndexer.getPositionForSection(section); } public int getSectionForPosition(int position) { return mIndexer.getSectionForPosition(position); } public Object[] getSections() { return mIndexer.getSections(); } } }
true
true
protected Dialog onCreateDialog(int id) { View content = getLayoutInflater().inflate(R.layout.dialog_edittext, null); final EditText editText = (EditText) content.findViewById(R.id.edittext); // No prediction in soft keyboard mode. TODO: Create a better way to disable prediction editText.setInputType(InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_FLAG_AUTO_COMPLETE); AlertDialog dialog = new AlertDialog.Builder(this) .setTitle(mDialogEditingWord != null ? R.string.user_dict_settings_edit_dialog_title : R.string.user_dict_settings_add_dialog_title) .setView(content) .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { onAddOrEditFinished(editText.getText().toString()); if (mAutoReturn) finish(); }}) .setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { if (mAutoReturn) finish(); }}) .create(); dialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_ADJUST_RESIZE | WindowManager.LayoutParams.SOFT_INPUT_STATE_VISIBLE); return dialog; }
protected Dialog onCreateDialog(int id) { View content = getLayoutInflater().inflate(R.layout.dialog_edittext, null); final EditText editText = (EditText) content.findViewById(R.id.edittext); // No prediction in soft keyboard mode. TODO: Create a better way to disable prediction editText.setInputType(InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_FLAG_AUTO_COMPLETE); AlertDialog dialog = new AlertDialog.Builder(this) .setTitle(mDialogEditingWord != null ? R.string.user_dict_settings_edit_dialog_title : R.string.user_dict_settings_add_dialog_title) .setView(content) .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { onAddOrEditFinished(editText.getText().toString()); if (mAutoReturn) finish(); }}) .setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { if (mAutoReturn) finish(); }}) .create(); dialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_ADJUST_PAN | WindowManager.LayoutParams.SOFT_INPUT_STATE_VISIBLE); return dialog; }
diff --git a/src/main/java/com/raccoonfink/cruisemonkey/controllers/EventRestService.java b/src/main/java/com/raccoonfink/cruisemonkey/controllers/EventRestService.java index 7cbc271..36316ef 100644 --- a/src/main/java/com/raccoonfink/cruisemonkey/controllers/EventRestService.java +++ b/src/main/java/com/raccoonfink/cruisemonkey/controllers/EventRestService.java @@ -1,137 +1,138 @@ package com.raccoonfink.cruisemonkey.controllers; import java.util.Date; import java.util.List; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Scope; import org.springframework.stereotype.Component; import org.springframework.transaction.annotation.Transactional; import org.springframework.util.Assert; import com.raccoonfink.cruisemonkey.model.Event; import com.raccoonfink.cruisemonkey.model.Favorite; import com.raccoonfink.cruisemonkey.server.EventService; import com.sun.jersey.api.core.InjectParam; import com.sun.jersey.api.spring.Autowire; @Component @Scope("request") @Path("/events") @Autowire public class EventRestService extends RestServiceBase implements InitializingBean { final Logger m_logger = LoggerFactory.getLogger(EventRestService.class); @InjectParam("eventService") @Autowired EventService m_eventService; @Context UriInfo m_uriInfo; public EventRestService() {} public EventRestService(@InjectParam("eventService") final EventService eventService) { super(); m_eventService = 
eventService; } @Override public void afterPropertiesSet() throws Exception { super.afterPropertiesSet(); Assert.notNull(m_eventService); } @GET @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON}) @Transactional(readOnly=true) public List<Event> getEvents(@QueryParam("start") final Date start, @QueryParam("end") final Date end, @QueryParam("user") final String userName) { m_logger.debug("start = {}, end = {}, user = {}", start, end, userName); final List<Event> events; if (start != null && end != null) { events = m_eventService.getEventsInRange(start, end, userName); } else { events = m_eventService.getEvents(userName); } return events; } @GET @Path("/{id}") @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON}) @Transactional(readOnly=true) public Event getEvent(@PathParam("id") final String id) { m_logger.debug("id = {}", id); return m_eventService.getEvent(id); } @PUT @Path("/{id}") @Transactional public Response putEvent(@PathParam("id") final String eventId, @QueryParam("isPublic") final Boolean isPublic) { final String userName = getCurrentUser(); m_logger.debug("user = {}, event = {}, isPublic = {}", userName, eventId, isPublic); if (userName == null || eventId == null || isPublic == null) { return Response.serverError().build(); } final Event event = m_eventService.getEvent(eventId); if (event == null) { m_logger.debug("Trying to modify an event that's not in the database!"); return Response.notModified().build(); } else { - if (event.getOwner().getUsername() != userName) { + if (!event.getOwner().getUsername().equals(userName)) { + m_logger.debug("owner = {}, username = {}", event.getOwner().getUsername(), userName); throw new WebApplicationException(401); } event.setIsPublic(isPublic); m_eventService.putEvent(event); return Response.seeOther(getRedirectUri(m_uriInfo)).build(); } } @DELETE @Path("/{id}") @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON}) @Transactional(readOnly=true) public void 
deleteEvent(@PathParam("id") final String id) { m_logger.debug("id = {}", id); final String user = getCurrentUser(); final Event event = m_eventService.getEvent(id); if (event == null) { m_logger.debug("Trying to delete an event that's not in the database!"); } else { if (event.getOwner().getUsername() != user) { throw new WebApplicationException(401); } m_eventService.deleteEvent(event); } } @POST @Consumes({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON}) @Transactional public Response putEvent(final Event event) { m_logger.debug("event = {}", event); m_eventService.putEvent(event, getCurrentUser()); return Response.seeOther(getRedirectUri(m_uriInfo, event.getId())).build(); } }
true
true
public Response putEvent(@PathParam("id") final String eventId, @QueryParam("isPublic") final Boolean isPublic) { final String userName = getCurrentUser(); m_logger.debug("user = {}, event = {}, isPublic = {}", userName, eventId, isPublic); if (userName == null || eventId == null || isPublic == null) { return Response.serverError().build(); } final Event event = m_eventService.getEvent(eventId); if (event == null) { m_logger.debug("Trying to modify an event that's not in the database!"); return Response.notModified().build(); } else { if (event.getOwner().getUsername() != userName) { throw new WebApplicationException(401); } event.setIsPublic(isPublic); m_eventService.putEvent(event); return Response.seeOther(getRedirectUri(m_uriInfo)).build(); } }
public Response putEvent(@PathParam("id") final String eventId, @QueryParam("isPublic") final Boolean isPublic) { final String userName = getCurrentUser(); m_logger.debug("user = {}, event = {}, isPublic = {}", userName, eventId, isPublic); if (userName == null || eventId == null || isPublic == null) { return Response.serverError().build(); } final Event event = m_eventService.getEvent(eventId); if (event == null) { m_logger.debug("Trying to modify an event that's not in the database!"); return Response.notModified().build(); } else { if (!event.getOwner().getUsername().equals(userName)) { m_logger.debug("owner = {}, username = {}", event.getOwner().getUsername(), userName); throw new WebApplicationException(401); } event.setIsPublic(isPublic); m_eventService.putEvent(event); return Response.seeOther(getRedirectUri(m_uriInfo)).build(); } }
diff --git a/project2/src/hmm/UnsupervisedTrainingAlgorithm.java b/project2/src/hmm/UnsupervisedTrainingAlgorithm.java index 53ad0ef..c47c355 100644 --- a/project2/src/hmm/UnsupervisedTrainingAlgorithm.java +++ b/project2/src/hmm/UnsupervisedTrainingAlgorithm.java @@ -1,248 +1,248 @@ package hmm; import java.util.ArrayList; import java.util.List; import java.util.Random; public class UnsupervisedTrainingAlgorithm { private static Random random=new Random(3); public static OptimizedStateCollection train(ArrayList<ArrayList<String>> trainingSentenceStrings, int stateCount){ OptimizedStateCollection hmm=new OptimizedStateCollection(); hmm.states.remove(hmm.unknownState().name); // initialize states for (int i=0; i<stateCount; i++){ hmm.getStateTraining("s"+i); } // setup random state transistion probabilities for (OptimizedState a: hmm.getStates()){ for (OptimizedState b: hmm.getStates()){ a.setNextStateProbability(b, BigDouble.valueOf(random.nextDouble())); } } ArrayList<ArrayList<Word>> trainingSentences=new ArrayList<ArrayList<Word>>(); // read all words for (List<String> list: trainingSentenceStrings){ ArrayList<Word> sentence=new ArrayList<Word>(); for (String word: list){ String[] parts=word.split("/"); sentence.add(hmm.getWordTraining(parts[0])); } trainingSentences.add(sentence); } // setup random word emission probabilities for (OptimizedState a: hmm.getStates()) for (Word w: hmm.words.values()) a.setWordEmissionProbability(w, BigDouble.valueOf(random.nextDouble())); // clear transitions to the start for (OptimizedState a: hmm.getStates()){ a.setNextStateProbability(hmm.startState(), BigDouble.ZERO); } // normalize all probabilities for (OptimizedState a: hmm.getStates()) a.normalize(); BigDouble oldProbability=BigDouble.valueOf(-1); BigDouble probability=BigDouble.ZERO; // optimization loop do{ // create forward and backward algorithms ArrayList<ForwardAlgorithm<OptimizedStateCollection, OptimizedState>> alphas=new 
ArrayList<ForwardAlgorithm<OptimizedStateCollection,OptimizedState>>(); ArrayList<BackwardAlgorithm<OptimizedStateCollection, OptimizedState>> betas=new ArrayList<BackwardAlgorithm<OptimizedStateCollection,OptimizedState>>(); //System.out.print(hmm); for (List<Word> list: trainingSentences){ ForwardAlgorithm<OptimizedStateCollection, OptimizedState> alpha = new ForwardAlgorithm<OptimizedStateCollection, OptimizedState>(hmm,list); alphas.add(alpha); BackwardAlgorithm<OptimizedStateCollection, OptimizedState> beta = new BackwardAlgorithm<OptimizedStateCollection, OptimizedState>(hmm, list); betas.add(beta); BigDouble a=alpha.getFinalProbability(); BigDouble b=beta.getFinalProbability(); if (a.add(b).compareTo(BigDouble.ZERO)!=0){ if ((a.subtract(b).abs().divide(a.add(b).abs()).compareTo(BigDouble.valueOf(1e-5))>0)) throw new Error("Alpha and Beta do not match"); } } // calculate the probability of the input under the current HMM probability=BigDouble.ONE; { int i=0; for (List<Word> list: trainingSentences){ probability=probability.multiply(alphas.get(i).getFinalProbability()); i++; } } System.out.println(probability); //System.out.println(hmm); // optimize while the probability of the output increases by at least 10 percent if (oldProbability.multiply(BigDouble.valueOf(1.1)).compareTo( - probability) > 0 || true) + probability) > 0) return hmm; oldProbability=probability; // update probabilities of a new HMM (M-step) OptimizedStateCollection newHmm=copyStateCollection(hmm); // set transition probabilities for (OptimizedState a: hmm.getStates()){ if (a==hmm.endState()) continue; OptimizedState newA=newHmm.getState(a.name); BigDouble denominator=BigDouble.ZERO; for (OptimizedState b: hmm.getStates()){ if (b==hmm.startState()) continue; OptimizedState newB=newHmm.getState(b.name); // calculate the numerator BigDouble numerator=BigDouble.ZERO; int i=0; for (List<Word> sentence: trainingSentences){ for( int t=-1; t<sentence.size(); t++){ BigDouble d= 
xi(t,sentence,a,b,alphas.get(i),betas.get(i),hmm) .multiply(BigDouble.valueOf(sentence.size())); //weight by sentence length numerator=numerator.add(d); denominator=denominator.add(d); } i++; } newA.setNextStateProbability(newB, numerator); } // normalize with the denominator // this can only be done after iteration over all b's above if (denominator.compareTo(BigDouble.ZERO)==0) denominator=BigDouble.ONE; for (OptimizedState b: hmm.getStates()){ if (b==hmm.startState()) continue; OptimizedState newB=newHmm.getState(b.name); newA.setNextStateProbability(newB, newA.nextStateProbability(newB).divide(denominator)); } } // set emission probabilities for (OptimizedState a: hmm.getStates()){ if (a==hmm.endState()) continue; if (a==hmm.startState()) continue; OptimizedState newA=newHmm.getState(a.name); // calculate the denominator BigDouble denominator=BigDouble.ZERO; { int i=0; for (List<Word> sentence: trainingSentences){ int t=0; for( Word w: sentence){ denominator=denominator.add( gamma(t,a,alphas.get(i),betas.get(i)) .multiply(BigDouble.valueOf(sentence.size()))); //weight by sentence length t++; } i++; } } if (denominator.compareTo(BigDouble.ZERO)==0) denominator=BigDouble.ONE; // set the probabilities for all words for (Word word: hmm.words.values()){ BigDouble numerator=BigDouble.ZERO; int i=0; for (List<Word> sentence: trainingSentences){ int t=0; for( Word w: sentence){ if (w==word) numerator=numerator.add( gamma(t,a,alphas.get(i),betas.get(i)) .multiply(BigDouble.valueOf(sentence.size()))); //weighten by sentence length t++; } i++; } newA.setWordEmissionProbability(word, numerator.divide(denominator)); } } hmm=newHmm; //return hmm; } while (true); // return statement is located above // never reached } /** * Formulae according to "Speech and language processing" by Daniel Jurafsky and James H. 
Martin * Xi=Probability of traversing from state i to state j at time t * Xi_t(i,j)=P(q_t=i,q_{t+1}=j | O,\lambda) * =\frac{\alpha_t(i)a_{ij}b_j(o_{t+1})\beta_{t+1}(j)}{\alpha_T(N)} * * @param t input position * @param sentence input sentence * @param a state i * @param b state j * @param alpha forward probability * @param beta backward probability * @param hmm * @return */ private static BigDouble xi(int t, List<Word> sentence, OptimizedState a, OptimizedState b, ForwardAlgorithm<OptimizedStateCollection,OptimizedState> alpha, BackwardAlgorithm<OptimizedStateCollection,OptimizedState> beta, OptimizedStateCollection hmm){ BigDouble result=alpha.get(t, a); result=result.multiply(a.nextStateProbability(b)); if ((t+1)<sentence.size()) result=result.multiply(b.wordEmittingProbability(sentence.get(t+1))); result=result.multiply(beta.get(t+1,b)); if (alpha.getFinalProbability().compareTo(BigDouble.ZERO)!=0) result=result.divide(alpha.getFinalProbability()); return result; } /** * Formulae according to "Speech and language processing" by Daniel Jurafsky and James H. Martin * Gamma==probability of beeing in state j at time t * @param t time * @param w word at time t * @param a state j * @param alpha * @param beta * @return */ private static BigDouble gamma(int t, OptimizedState a, ForwardAlgorithm<OptimizedStateCollection,OptimizedState> alpha, BackwardAlgorithm<OptimizedStateCollection,OptimizedState> beta){ return alpha.get(t,a) .multiply(beta.get(t,a)) .divide(alpha.getFinalProbability()); } private static OptimizedStateCollection copyStateCollection(OptimizedStateCollection other){ OptimizedStateCollection hmm=new OptimizedStateCollection(); // clear data hmm.words.clear(); hmm.states.clear(); // add states for (OptimizedState s: other.states.values()){ hmm.getStateTraining(s.name); } // add words for (Word w: other.words.values()){ hmm.words.put(w.name,w); } return hmm; } }
true
true
public static OptimizedStateCollection train(ArrayList<ArrayList<String>> trainingSentenceStrings, int stateCount){ OptimizedStateCollection hmm=new OptimizedStateCollection(); hmm.states.remove(hmm.unknownState().name); // initialize states for (int i=0; i<stateCount; i++){ hmm.getStateTraining("s"+i); } // setup random state transistion probabilities for (OptimizedState a: hmm.getStates()){ for (OptimizedState b: hmm.getStates()){ a.setNextStateProbability(b, BigDouble.valueOf(random.nextDouble())); } } ArrayList<ArrayList<Word>> trainingSentences=new ArrayList<ArrayList<Word>>(); // read all words for (List<String> list: trainingSentenceStrings){ ArrayList<Word> sentence=new ArrayList<Word>(); for (String word: list){ String[] parts=word.split("/"); sentence.add(hmm.getWordTraining(parts[0])); } trainingSentences.add(sentence); } // setup random word emission probabilities for (OptimizedState a: hmm.getStates()) for (Word w: hmm.words.values()) a.setWordEmissionProbability(w, BigDouble.valueOf(random.nextDouble())); // clear transitions to the start for (OptimizedState a: hmm.getStates()){ a.setNextStateProbability(hmm.startState(), BigDouble.ZERO); } // normalize all probabilities for (OptimizedState a: hmm.getStates()) a.normalize(); BigDouble oldProbability=BigDouble.valueOf(-1); BigDouble probability=BigDouble.ZERO; // optimization loop do{ // create forward and backward algorithms ArrayList<ForwardAlgorithm<OptimizedStateCollection, OptimizedState>> alphas=new ArrayList<ForwardAlgorithm<OptimizedStateCollection,OptimizedState>>(); ArrayList<BackwardAlgorithm<OptimizedStateCollection, OptimizedState>> betas=new ArrayList<BackwardAlgorithm<OptimizedStateCollection,OptimizedState>>(); //System.out.print(hmm); for (List<Word> list: trainingSentences){ ForwardAlgorithm<OptimizedStateCollection, OptimizedState> alpha = new ForwardAlgorithm<OptimizedStateCollection, OptimizedState>(hmm,list); alphas.add(alpha); BackwardAlgorithm<OptimizedStateCollection, 
OptimizedState> beta = new BackwardAlgorithm<OptimizedStateCollection, OptimizedState>(hmm, list); betas.add(beta); BigDouble a=alpha.getFinalProbability(); BigDouble b=beta.getFinalProbability(); if (a.add(b).compareTo(BigDouble.ZERO)!=0){ if ((a.subtract(b).abs().divide(a.add(b).abs()).compareTo(BigDouble.valueOf(1e-5))>0)) throw new Error("Alpha and Beta do not match"); } } // calculate the probability of the input under the current HMM probability=BigDouble.ONE; { int i=0; for (List<Word> list: trainingSentences){ probability=probability.multiply(alphas.get(i).getFinalProbability()); i++; } } System.out.println(probability); //System.out.println(hmm); // optimize while the probability of the output increases by at least 10 percent if (oldProbability.multiply(BigDouble.valueOf(1.1)).compareTo( probability) > 0 || true) return hmm; oldProbability=probability; // update probabilities of a new HMM (M-step) OptimizedStateCollection newHmm=copyStateCollection(hmm); // set transition probabilities for (OptimizedState a: hmm.getStates()){ if (a==hmm.endState()) continue; OptimizedState newA=newHmm.getState(a.name); BigDouble denominator=BigDouble.ZERO; for (OptimizedState b: hmm.getStates()){ if (b==hmm.startState()) continue; OptimizedState newB=newHmm.getState(b.name); // calculate the numerator BigDouble numerator=BigDouble.ZERO; int i=0; for (List<Word> sentence: trainingSentences){ for( int t=-1; t<sentence.size(); t++){ BigDouble d= xi(t,sentence,a,b,alphas.get(i),betas.get(i),hmm) .multiply(BigDouble.valueOf(sentence.size())); //weight by sentence length numerator=numerator.add(d); denominator=denominator.add(d); } i++; } newA.setNextStateProbability(newB, numerator); } // normalize with the denominator // this can only be done after iteration over all b's above if (denominator.compareTo(BigDouble.ZERO)==0) denominator=BigDouble.ONE; for (OptimizedState b: hmm.getStates()){ if (b==hmm.startState()) continue; OptimizedState newB=newHmm.getState(b.name); 
newA.setNextStateProbability(newB, newA.nextStateProbability(newB).divide(denominator)); } } // set emission probabilities for (OptimizedState a: hmm.getStates()){ if (a==hmm.endState()) continue; if (a==hmm.startState()) continue; OptimizedState newA=newHmm.getState(a.name); // calculate the denominator BigDouble denominator=BigDouble.ZERO; { int i=0; for (List<Word> sentence: trainingSentences){ int t=0; for( Word w: sentence){ denominator=denominator.add( gamma(t,a,alphas.get(i),betas.get(i)) .multiply(BigDouble.valueOf(sentence.size()))); //weight by sentence length t++; } i++; } } if (denominator.compareTo(BigDouble.ZERO)==0) denominator=BigDouble.ONE; // set the probabilities for all words for (Word word: hmm.words.values()){ BigDouble numerator=BigDouble.ZERO; int i=0; for (List<Word> sentence: trainingSentences){ int t=0; for( Word w: sentence){ if (w==word) numerator=numerator.add( gamma(t,a,alphas.get(i),betas.get(i)) .multiply(BigDouble.valueOf(sentence.size()))); //weighten by sentence length t++; } i++; } newA.setWordEmissionProbability(word, numerator.divide(denominator)); } } hmm=newHmm; //return hmm; } while (true); // return statement is located above // never reached }
public static OptimizedStateCollection train(ArrayList<ArrayList<String>> trainingSentenceStrings, int stateCount){ OptimizedStateCollection hmm=new OptimizedStateCollection(); hmm.states.remove(hmm.unknownState().name); // initialize states for (int i=0; i<stateCount; i++){ hmm.getStateTraining("s"+i); } // setup random state transistion probabilities for (OptimizedState a: hmm.getStates()){ for (OptimizedState b: hmm.getStates()){ a.setNextStateProbability(b, BigDouble.valueOf(random.nextDouble())); } } ArrayList<ArrayList<Word>> trainingSentences=new ArrayList<ArrayList<Word>>(); // read all words for (List<String> list: trainingSentenceStrings){ ArrayList<Word> sentence=new ArrayList<Word>(); for (String word: list){ String[] parts=word.split("/"); sentence.add(hmm.getWordTraining(parts[0])); } trainingSentences.add(sentence); } // setup random word emission probabilities for (OptimizedState a: hmm.getStates()) for (Word w: hmm.words.values()) a.setWordEmissionProbability(w, BigDouble.valueOf(random.nextDouble())); // clear transitions to the start for (OptimizedState a: hmm.getStates()){ a.setNextStateProbability(hmm.startState(), BigDouble.ZERO); } // normalize all probabilities for (OptimizedState a: hmm.getStates()) a.normalize(); BigDouble oldProbability=BigDouble.valueOf(-1); BigDouble probability=BigDouble.ZERO; // optimization loop do{ // create forward and backward algorithms ArrayList<ForwardAlgorithm<OptimizedStateCollection, OptimizedState>> alphas=new ArrayList<ForwardAlgorithm<OptimizedStateCollection,OptimizedState>>(); ArrayList<BackwardAlgorithm<OptimizedStateCollection, OptimizedState>> betas=new ArrayList<BackwardAlgorithm<OptimizedStateCollection,OptimizedState>>(); //System.out.print(hmm); for (List<Word> list: trainingSentences){ ForwardAlgorithm<OptimizedStateCollection, OptimizedState> alpha = new ForwardAlgorithm<OptimizedStateCollection, OptimizedState>(hmm,list); alphas.add(alpha); BackwardAlgorithm<OptimizedStateCollection, 
OptimizedState> beta = new BackwardAlgorithm<OptimizedStateCollection, OptimizedState>(hmm, list); betas.add(beta); BigDouble a=alpha.getFinalProbability(); BigDouble b=beta.getFinalProbability(); if (a.add(b).compareTo(BigDouble.ZERO)!=0){ if ((a.subtract(b).abs().divide(a.add(b).abs()).compareTo(BigDouble.valueOf(1e-5))>0)) throw new Error("Alpha and Beta do not match"); } } // calculate the probability of the input under the current HMM probability=BigDouble.ONE; { int i=0; for (List<Word> list: trainingSentences){ probability=probability.multiply(alphas.get(i).getFinalProbability()); i++; } } System.out.println(probability); //System.out.println(hmm); // optimize while the probability of the output increases by at least 10 percent if (oldProbability.multiply(BigDouble.valueOf(1.1)).compareTo( probability) > 0) return hmm; oldProbability=probability; // update probabilities of a new HMM (M-step) OptimizedStateCollection newHmm=copyStateCollection(hmm); // set transition probabilities for (OptimizedState a: hmm.getStates()){ if (a==hmm.endState()) continue; OptimizedState newA=newHmm.getState(a.name); BigDouble denominator=BigDouble.ZERO; for (OptimizedState b: hmm.getStates()){ if (b==hmm.startState()) continue; OptimizedState newB=newHmm.getState(b.name); // calculate the numerator BigDouble numerator=BigDouble.ZERO; int i=0; for (List<Word> sentence: trainingSentences){ for( int t=-1; t<sentence.size(); t++){ BigDouble d= xi(t,sentence,a,b,alphas.get(i),betas.get(i),hmm) .multiply(BigDouble.valueOf(sentence.size())); //weight by sentence length numerator=numerator.add(d); denominator=denominator.add(d); } i++; } newA.setNextStateProbability(newB, numerator); } // normalize with the denominator // this can only be done after iteration over all b's above if (denominator.compareTo(BigDouble.ZERO)==0) denominator=BigDouble.ONE; for (OptimizedState b: hmm.getStates()){ if (b==hmm.startState()) continue; OptimizedState newB=newHmm.getState(b.name); 
newA.setNextStateProbability(newB, newA.nextStateProbability(newB).divide(denominator)); } } // set emission probabilities for (OptimizedState a: hmm.getStates()){ if (a==hmm.endState()) continue; if (a==hmm.startState()) continue; OptimizedState newA=newHmm.getState(a.name); // calculate the denominator BigDouble denominator=BigDouble.ZERO; { int i=0; for (List<Word> sentence: trainingSentences){ int t=0; for( Word w: sentence){ denominator=denominator.add( gamma(t,a,alphas.get(i),betas.get(i)) .multiply(BigDouble.valueOf(sentence.size()))); //weight by sentence length t++; } i++; } } if (denominator.compareTo(BigDouble.ZERO)==0) denominator=BigDouble.ONE; // set the probabilities for all words for (Word word: hmm.words.values()){ BigDouble numerator=BigDouble.ZERO; int i=0; for (List<Word> sentence: trainingSentences){ int t=0; for( Word w: sentence){ if (w==word) numerator=numerator.add( gamma(t,a,alphas.get(i),betas.get(i)) .multiply(BigDouble.valueOf(sentence.size()))); //weighten by sentence length t++; } i++; } newA.setWordEmissionProbability(word, numerator.divide(denominator)); } } hmm=newHmm; //return hmm; } while (true); // return statement is located above // never reached }
diff --git a/src/test/java/org/abqjug/LoadCacheTest.java b/src/test/java/org/abqjug/LoadCacheTest.java index 66c4fe3..3fd8dd2 100644 --- a/src/test/java/org/abqjug/LoadCacheTest.java +++ b/src/test/java/org/abqjug/LoadCacheTest.java @@ -1,120 +1,120 @@ package org.abqjug; import com.google.common.base.Function; import com.google.common.base.Objects; import com.google.common.cache.*; import com.google.common.collect.Lists; import org.testng.annotations.Test; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import static org.fest.assertions.Assertions.assertThat; public class LoadCacheTest { public class Key { private int value; public Key(int value) { this.value = value; } public int getValue() { return value; } @Override public String toString() { return Objects. toStringHelper(this) .add("value", value).toString(); } @Override public boolean equals(Object object) { return object instanceof Key && Objects.equal(value, ((Key) object) .getValue()); } @Override public int hashCode() { return Objects.hashCode(value, 13); } } public class Result { private int value; public Result(int value) { this.value = value; } public int getValue() { return value; } @Override public String toString() { return Objects.toStringHelper(this).add("value", value).toString(); } @Override public boolean equals(Object object) { return object instanceof Key && Objects.equal(value, ((Key) object).getValue()); } @Override public int hashCode() { return Objects.hashCode(value, 23); } } public class MyRemovalListener implements RemovalListener<Key, Result> { public void onRemoval(RemovalNotification<Key, Result> keyGraphRemovalNotification) { System.out.println(keyGraphRemovalNotification.getCause()); System.out.println(keyGraphRemovalNotification.getKey()); } } public Result createResult(Key key) { return new Result(key.getValue() * 2); } @Test public void testLoadingCache() throws ExecutionException { LoadingCache<Key, Result> graphs = 
CacheBuilder.newBuilder() .concurrencyLevel(4) .maximumSize(10000) .expireAfterWrite(10, TimeUnit.MINUTES) .expireAfterAccess(10, TimeUnit.MINUTES) .initialCapacity(50) .weakKeys() .weakValues() - .softValues() +// .softValues() .removalListener(new MyRemovalListener()) .build( new CacheLoader<Key, Result>() { public Result load(Key key) { return createResult(key); } }); assertThat(graphs.get(new Key(13)).getValue()).isEqualTo(26); assertThat(graphs.get(new Key(15)).getValue()).isEqualTo(30); System.out.println(graphs.getAll (Lists.transform(Lists.newArrayList(1, 3, 4, 19, 20, 25) , new IntegerToKeyFunction()))); } class IntegerToKeyFunction implements Function<Integer, Key> { public Key apply(Integer value) { return new Key(value); } } }
true
true
public void testLoadingCache() throws ExecutionException { LoadingCache<Key, Result> graphs = CacheBuilder.newBuilder() .concurrencyLevel(4) .maximumSize(10000) .expireAfterWrite(10, TimeUnit.MINUTES) .expireAfterAccess(10, TimeUnit.MINUTES) .initialCapacity(50) .weakKeys() .weakValues() .softValues() .removalListener(new MyRemovalListener()) .build( new CacheLoader<Key, Result>() { public Result load(Key key) { return createResult(key); } }); assertThat(graphs.get(new Key(13)).getValue()).isEqualTo(26); assertThat(graphs.get(new Key(15)).getValue()).isEqualTo(30); System.out.println(graphs.getAll (Lists.transform(Lists.newArrayList(1, 3, 4, 19, 20, 25) , new IntegerToKeyFunction()))); }
public void testLoadingCache() throws ExecutionException { LoadingCache<Key, Result> graphs = CacheBuilder.newBuilder() .concurrencyLevel(4) .maximumSize(10000) .expireAfterWrite(10, TimeUnit.MINUTES) .expireAfterAccess(10, TimeUnit.MINUTES) .initialCapacity(50) .weakKeys() .weakValues() // .softValues() .removalListener(new MyRemovalListener()) .build( new CacheLoader<Key, Result>() { public Result load(Key key) { return createResult(key); } }); assertThat(graphs.get(new Key(13)).getValue()).isEqualTo(26); assertThat(graphs.get(new Key(15)).getValue()).isEqualTo(30); System.out.println(graphs.getAll (Lists.transform(Lists.newArrayList(1, 3, 4, 19, 20, 25) , new IntegerToKeyFunction()))); }
diff --git a/src/oving5/TradeDeal.java b/src/oving5/TradeDeal.java index 1d6cde8..ac26602 100644 --- a/src/oving5/TradeDeal.java +++ b/src/oving5/TradeDeal.java @@ -1,86 +1,86 @@ package oving5; import java.io.Serializable; import java.util.ArrayList; import java.util.List; public class TradeDeal implements Serializable { private static final long serialVersionUID = -6273972612945569502L; //Money to trade for item private final double tradeMoney; //Items to trade for item private final List<TradableItem> tradeItems = new ArrayList<TradableItem>(); //Item trading for private final TradableItem item; private final String trader; private final String buyer; public TradeDeal(TradableItem item, double money, List<TradableItem> items, String seller, String buyer){ this.item = item; this.tradeMoney = money; this.tradeItems.addAll(items); this.trader = seller; this.buyer = buyer; } public TradeDeal(TradableItem item, double money, String seller, String buyer){ this(item, money, new ArrayList<TradableItem>(), seller, buyer); } public double getTradeMoney() { return tradeMoney; } public List<TradableItem> getTradeItems() { return tradeItems; } public TradableItem getItem() { return item; } public String getTrader() { return trader; } public String getBuyer() { return buyer; } /** * Pretty print the current deal, this should only be used when displaying * to user * @return - String representation of this deal */ public String pPrint(){ return this.buyer + " wants to buy " + this.item + " from " + this.trader + " for " + this.tradeMoney + " and these items " + this.tradeItems; } /** * Parse a string which should contain a deal into an TradeDeal object * @param deal - The string representing the deal, must come from the * toString() method of TradeDeal * @return - A TradeDeal */ public static TradeDeal parseDeal(String deal){ String[] splited = deal.trim().split(";"); TradableItem item = TradableItem.parseTradeItem(splited[0]); double money = Double.parseDouble(splited[3]); - 
String[] li = splited[4].replaceAll("\\[\\]", "").split(","); + String[] li = splited[4].replaceAll("[\\[\\]]", "").split(","); List<TradableItem> list = new ArrayList<TradableItem>(); for(String i : li){ list.add(TradableItem.parseTradeItem(i)); } if(list.isEmpty()) return new TradeDeal(item, money, splited[2], splited[1]); return new TradeDeal(item, money, list, splited[2], splited[1]); } public String toString(){ return this.item + ";" + this.buyer + ";" + this.trader + ";" + this.tradeMoney + ";" + this.tradeItems; } }
true
true
public static TradeDeal parseDeal(String deal){ String[] splited = deal.trim().split(";"); TradableItem item = TradableItem.parseTradeItem(splited[0]); double money = Double.parseDouble(splited[3]); String[] li = splited[4].replaceAll("\\[\\]", "").split(","); List<TradableItem> list = new ArrayList<TradableItem>(); for(String i : li){ list.add(TradableItem.parseTradeItem(i)); } if(list.isEmpty()) return new TradeDeal(item, money, splited[2], splited[1]); return new TradeDeal(item, money, list, splited[2], splited[1]); }
public static TradeDeal parseDeal(String deal){ String[] splited = deal.trim().split(";"); TradableItem item = TradableItem.parseTradeItem(splited[0]); double money = Double.parseDouble(splited[3]); String[] li = splited[4].replaceAll("[\\[\\]]", "").split(","); List<TradableItem> list = new ArrayList<TradableItem>(); for(String i : li){ list.add(TradableItem.parseTradeItem(i)); } if(list.isEmpty()) return new TradeDeal(item, money, splited[2], splited[1]); return new TradeDeal(item, money, list, splited[2], splited[1]); }
diff --git a/sonar-server/src/main/java/org/sonar/server/issue/IssuesFinderSort.java b/sonar-server/src/main/java/org/sonar/server/issue/IssuesFinderSort.java index d1d55be471..a4a31f53be 100644 --- a/sonar-server/src/main/java/org/sonar/server/issue/IssuesFinderSort.java +++ b/sonar-server/src/main/java/org/sonar/server/issue/IssuesFinderSort.java @@ -1,182 +1,182 @@ /* * SonarQube, open source software quality management tool. * Copyright (C) 2008-2013 SonarSource * mailto:contact AT sonarsource DOT com * * SonarQube is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 3 of the License, or (at your option) any later version. * * SonarQube is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software Foundation, * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
*/ package org.sonar.server.issue; import com.google.common.base.Function; import com.google.common.collect.Ordering; import org.sonar.api.issue.IssueQuery; import org.sonar.api.rule.Severity; import org.sonar.core.issue.db.IssueDto; import java.util.Collection; import java.util.Date; import java.util.List; class IssuesFinderSort { private List<IssueDto> issues; private IssueQuery query; public IssuesFinderSort(List<IssueDto> issues, IssueQuery query) { this.issues = issues; this.query = query; } public List<IssueDto> sort() { IssueQuery.Sort sort = query.sort(); if (sort != null) { IssueProcessor issueProcessor; switch (sort) { case ASSIGNEE: issueProcessor = new AssigneeSortIssueProcessor(); break; case SEVERITY: issueProcessor = new SeveritySortIssueProcessor(); break; case STATUS: issueProcessor = new StatusSortIssueProcessor(); break; case CREATION_DATE: issueProcessor = new CreationDateSortIssueProcessor(); break; case UPDATE_DATE: issueProcessor = new UpdateDateSortIssueProcessor(); break; case CLOSE_DATE: issueProcessor = new CloseDateSortIssueProcessor(); break; default: - throw new IllegalArgumentException("Cannot sort issues on field : " + query.sort().name()); + throw new IllegalArgumentException("Cannot sort issues on field : " + sort.name()); } return issueProcessor.sort(issues, query.asc()); } return issues; } abstract static class IssueProcessor { abstract Function sortFieldFunction(); abstract Ordering sortFieldOrdering(boolean ascending); final List<IssueDto> sort(Collection<IssueDto> issueDtos, boolean ascending) { Ordering<IssueDto> ordering = sortFieldOrdering(ascending).onResultOf(sortFieldFunction()); return ordering.immutableSortedCopy(issueDtos); } } abstract static class TextSortIssueProcessor extends IssueProcessor { @Override Function sortFieldFunction() { return new Function<IssueDto, String>() { public String apply(IssueDto issueDto) { return sortField(issueDto); } }; } abstract String sortField(IssueDto issueDto); @Override Ordering 
sortFieldOrdering(boolean ascending) { Ordering<String> ordering = Ordering.from(String.CASE_INSENSITIVE_ORDER).nullsLast(); if (!ascending) { ordering = ordering.reverse(); } return ordering; } } static class AssigneeSortIssueProcessor extends TextSortIssueProcessor { @Override String sortField(IssueDto issueDto) { return issueDto.getAssignee(); } } static class StatusSortIssueProcessor extends TextSortIssueProcessor { @Override String sortField(IssueDto issueDto) { return issueDto.getStatus(); } } static class SeveritySortIssueProcessor extends IssueProcessor { @Override Function sortFieldFunction() { return new Function<IssueDto, Integer>() { public Integer apply(IssueDto issueDto) { return Severity.ALL.indexOf(issueDto.getSeverity()); } }; } @Override Ordering sortFieldOrdering(boolean ascending) { Ordering<Integer> ordering = Ordering.<Integer>natural().nullsLast(); if (!ascending) { ordering = ordering.reverse(); } return ordering; } } abstract static class DateSortRowProcessor extends IssueProcessor { @Override Function sortFieldFunction() { return new Function<IssueDto, Date>() { public Date apply(IssueDto issueDto) { return sortField(issueDto); } }; } abstract Date sortField(IssueDto issueDto); @Override Ordering sortFieldOrdering(boolean ascending) { Ordering<Date> ordering = Ordering.<Date>natural().nullsLast(); if (!ascending) { ordering = ordering.reverse(); } return ordering; } } static class CreationDateSortIssueProcessor extends DateSortRowProcessor { @Override Date sortField(IssueDto issueDto) { return issueDto.getIssueCreationDate(); } } static class UpdateDateSortIssueProcessor extends DateSortRowProcessor { @Override Date sortField(IssueDto issueDto) { return issueDto.getIssueUpdateDate(); } } static class CloseDateSortIssueProcessor extends DateSortRowProcessor { @Override Date sortField(IssueDto issueDto) { return issueDto.getIssueCloseDate(); } } }
true
true
public List<IssueDto> sort() { IssueQuery.Sort sort = query.sort(); if (sort != null) { IssueProcessor issueProcessor; switch (sort) { case ASSIGNEE: issueProcessor = new AssigneeSortIssueProcessor(); break; case SEVERITY: issueProcessor = new SeveritySortIssueProcessor(); break; case STATUS: issueProcessor = new StatusSortIssueProcessor(); break; case CREATION_DATE: issueProcessor = new CreationDateSortIssueProcessor(); break; case UPDATE_DATE: issueProcessor = new UpdateDateSortIssueProcessor(); break; case CLOSE_DATE: issueProcessor = new CloseDateSortIssueProcessor(); break; default: throw new IllegalArgumentException("Cannot sort issues on field : " + query.sort().name()); } return issueProcessor.sort(issues, query.asc()); } return issues; }
public List<IssueDto> sort() { IssueQuery.Sort sort = query.sort(); if (sort != null) { IssueProcessor issueProcessor; switch (sort) { case ASSIGNEE: issueProcessor = new AssigneeSortIssueProcessor(); break; case SEVERITY: issueProcessor = new SeveritySortIssueProcessor(); break; case STATUS: issueProcessor = new StatusSortIssueProcessor(); break; case CREATION_DATE: issueProcessor = new CreationDateSortIssueProcessor(); break; case UPDATE_DATE: issueProcessor = new UpdateDateSortIssueProcessor(); break; case CLOSE_DATE: issueProcessor = new CloseDateSortIssueProcessor(); break; default: throw new IllegalArgumentException("Cannot sort issues on field : " + sort.name()); } return issueProcessor.sort(issues, query.asc()); } return issues; }
diff --git a/src/com/gildorym/charactercards/CharacterCards.java b/src/com/gildorym/charactercards/CharacterCards.java index f04a34b..1a637d7 100644 --- a/src/com/gildorym/charactercards/CharacterCards.java +++ b/src/com/gildorym/charactercards/CharacterCards.java @@ -1,89 +1,91 @@ package com.gildorym.charactercards; import java.util.HashMap; import java.util.Map; import org.bukkit.Bukkit; import org.bukkit.ChatColor; import org.bukkit.entity.Player; import org.bukkit.event.Listener; import org.bukkit.plugin.java.JavaPlugin; import com.gildorym.basicchar.BasicChar; public class CharacterCards extends JavaPlugin { private Map<String, CharacterCard> characterCards = new HashMap<String, CharacterCard>(); /** * @return the characterCards */ public Map<String, CharacterCard> getCharacterCards() { return characterCards; } public void onEnable() { SaveDataManager.loadData(this); this.registerListeners(new Listener[] { new PlayerInteractEntityListener(this), new PlayerDeathListener(this), new EntityRegainHealthListener(), }); this.getServer().getScheduler().scheduleSyncRepeatingTask(this, new Runnable() { @Override public void run() { for (Player player : Bukkit.getServer().getOnlinePlayers()) { BasicChar basicChar = (BasicChar) Bukkit.getServer().getPluginManager().getPlugin("BasicChar"); if (basicChar.levels.get(player.getName()) != null) { Integer maxHealth = (int) (5 + Math.floor(basicChar.levels.get(player.getName()) / 5)); - if (CharacterCards.this.getCharacterCards().get(player.getName()).getRace() == Race.ELF) { - maxHealth -= 1; - } - if (CharacterCards.this.getCharacterCards().get(player.getName()).getRace() == Race.DWARF) { - maxHealth += 1; - } - if (CharacterCards.this.getCharacterCards().get(player.getName()).getRace() == Race.GNOME) { - maxHealth += 1; - } - if (CharacterCards.this.getCharacterCards().get(player.getName()).getHealth() < maxHealth) { - 
CharacterCards.this.getCharacterCards().get(player.getName()).setHealth(CharacterCards.this.getCharacterCards().get(player.getName()).getHealth() + 1); + if (CharacterCards.this.getCharacterCards().get(player.getName()) != null) { + if (CharacterCards.this.getCharacterCards().get(player.getName()).getRace() == Race.ELF) { + maxHealth -= 1; + } + if (CharacterCards.this.getCharacterCards().get(player.getName()).getRace() == Race.DWARF) { + maxHealth += 1; + } + if (CharacterCards.this.getCharacterCards().get(player.getName()).getRace() == Race.GNOME) { + maxHealth += 1; + } + if (CharacterCards.this.getCharacterCards().get(player.getName()).getHealth() < maxHealth) { + CharacterCards.this.getCharacterCards().get(player.getName()).setHealth(CharacterCards.this.getCharacterCards().get(player.getName()).getHealth() + 1); + } } } } } }, 7200L, 7200L); this.getServer().getScheduler().scheduleSyncRepeatingTask(this, new Runnable() { @Override public void run() { for (Player player : Bukkit.getServer().getOnlinePlayers()) { if (player.getHealth() < player.getMaxHealth() && player.isSleeping()) { player.setHealth(player.getHealth() + Math.min(Math.round((float) player.getMaxHealth() / 20F), player.getMaxHealth() - player.getHealth())); player.sendMessage(ChatColor.GREEN + "" + ChatColor.ITALIC + "You feel a little more refreshed from sleep. 
+" + Math.min(Math.round((float) player.getMaxHealth() / 20F), player.getMaxHealth() - player.getHealth()) + "HP"); } } } }, 2400L, 2400L); this.getCommand("setage").setExecutor(new SetAgeCommand(this)); this.getCommand("setgender").setExecutor(new SetGenderCommand(this)); this.getCommand("setrace").setExecutor(new SetRaceCommand(this)); this.getCommand("setinfo").setExecutor(new SetInfoCommand(this)); this.getCommand("addinfo").setExecutor(new AddInfoCommand(this)); this.getCommand("char").setExecutor(new CharCommand(this)); this.getCommand("takehit").setExecutor(new TakeHitCommand(this)); } public void onDisable() { SaveDataManager.saveData(this); } private void registerListeners(Listener... listeners) { for (Listener listener : listeners) { this.getServer().getPluginManager().registerEvents(listener, this); } } }
true
true
public void onEnable() { SaveDataManager.loadData(this); this.registerListeners(new Listener[] { new PlayerInteractEntityListener(this), new PlayerDeathListener(this), new EntityRegainHealthListener(), }); this.getServer().getScheduler().scheduleSyncRepeatingTask(this, new Runnable() { @Override public void run() { for (Player player : Bukkit.getServer().getOnlinePlayers()) { BasicChar basicChar = (BasicChar) Bukkit.getServer().getPluginManager().getPlugin("BasicChar"); if (basicChar.levels.get(player.getName()) != null) { Integer maxHealth = (int) (5 + Math.floor(basicChar.levels.get(player.getName()) / 5)); if (CharacterCards.this.getCharacterCards().get(player.getName()).getRace() == Race.ELF) { maxHealth -= 1; } if (CharacterCards.this.getCharacterCards().get(player.getName()).getRace() == Race.DWARF) { maxHealth += 1; } if (CharacterCards.this.getCharacterCards().get(player.getName()).getRace() == Race.GNOME) { maxHealth += 1; } if (CharacterCards.this.getCharacterCards().get(player.getName()).getHealth() < maxHealth) { CharacterCards.this.getCharacterCards().get(player.getName()).setHealth(CharacterCards.this.getCharacterCards().get(player.getName()).getHealth() + 1); } } } } }, 7200L, 7200L); this.getServer().getScheduler().scheduleSyncRepeatingTask(this, new Runnable() { @Override public void run() { for (Player player : Bukkit.getServer().getOnlinePlayers()) { if (player.getHealth() < player.getMaxHealth() && player.isSleeping()) { player.setHealth(player.getHealth() + Math.min(Math.round((float) player.getMaxHealth() / 20F), player.getMaxHealth() - player.getHealth())); player.sendMessage(ChatColor.GREEN + "" + ChatColor.ITALIC + "You feel a little more refreshed from sleep. 
+" + Math.min(Math.round((float) player.getMaxHealth() / 20F), player.getMaxHealth() - player.getHealth()) + "HP"); } } } }, 2400L, 2400L); this.getCommand("setage").setExecutor(new SetAgeCommand(this)); this.getCommand("setgender").setExecutor(new SetGenderCommand(this)); this.getCommand("setrace").setExecutor(new SetRaceCommand(this)); this.getCommand("setinfo").setExecutor(new SetInfoCommand(this)); this.getCommand("addinfo").setExecutor(new AddInfoCommand(this)); this.getCommand("char").setExecutor(new CharCommand(this)); this.getCommand("takehit").setExecutor(new TakeHitCommand(this)); }
public void onEnable() { SaveDataManager.loadData(this); this.registerListeners(new Listener[] { new PlayerInteractEntityListener(this), new PlayerDeathListener(this), new EntityRegainHealthListener(), }); this.getServer().getScheduler().scheduleSyncRepeatingTask(this, new Runnable() { @Override public void run() { for (Player player : Bukkit.getServer().getOnlinePlayers()) { BasicChar basicChar = (BasicChar) Bukkit.getServer().getPluginManager().getPlugin("BasicChar"); if (basicChar.levels.get(player.getName()) != null) { Integer maxHealth = (int) (5 + Math.floor(basicChar.levels.get(player.getName()) / 5)); if (CharacterCards.this.getCharacterCards().get(player.getName()) != null) { if (CharacterCards.this.getCharacterCards().get(player.getName()).getRace() == Race.ELF) { maxHealth -= 1; } if (CharacterCards.this.getCharacterCards().get(player.getName()).getRace() == Race.DWARF) { maxHealth += 1; } if (CharacterCards.this.getCharacterCards().get(player.getName()).getRace() == Race.GNOME) { maxHealth += 1; } if (CharacterCards.this.getCharacterCards().get(player.getName()).getHealth() < maxHealth) { CharacterCards.this.getCharacterCards().get(player.getName()).setHealth(CharacterCards.this.getCharacterCards().get(player.getName()).getHealth() + 1); } } } } } }, 7200L, 7200L); this.getServer().getScheduler().scheduleSyncRepeatingTask(this, new Runnable() { @Override public void run() { for (Player player : Bukkit.getServer().getOnlinePlayers()) { if (player.getHealth() < player.getMaxHealth() && player.isSleeping()) { player.setHealth(player.getHealth() + Math.min(Math.round((float) player.getMaxHealth() / 20F), player.getMaxHealth() - player.getHealth())); player.sendMessage(ChatColor.GREEN + "" + ChatColor.ITALIC + "You feel a little more refreshed from sleep. 
+" + Math.min(Math.round((float) player.getMaxHealth() / 20F), player.getMaxHealth() - player.getHealth()) + "HP"); } } } }, 2400L, 2400L); this.getCommand("setage").setExecutor(new SetAgeCommand(this)); this.getCommand("setgender").setExecutor(new SetGenderCommand(this)); this.getCommand("setrace").setExecutor(new SetRaceCommand(this)); this.getCommand("setinfo").setExecutor(new SetInfoCommand(this)); this.getCommand("addinfo").setExecutor(new AddInfoCommand(this)); this.getCommand("char").setExecutor(new CharCommand(this)); this.getCommand("takehit").setExecutor(new TakeHitCommand(this)); }
diff --git a/core/src/com/google/zxing/qrcode/decoder/DecodedBitStreamParser.java b/core/src/com/google/zxing/qrcode/decoder/DecodedBitStreamParser.java index e69ca8d5..4de854f9 100644 --- a/core/src/com/google/zxing/qrcode/decoder/DecodedBitStreamParser.java +++ b/core/src/com/google/zxing/qrcode/decoder/DecodedBitStreamParser.java @@ -1,235 +1,238 @@ /* * Copyright 2007 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.zxing.qrcode.decoder; import com.google.zxing.ReaderException; import java.io.UnsupportedEncodingException; /** * <p>QR Codes can encode text as bits in one of several modes, and can use multiple modes * in one QR Code. 
This class decodes the bits back into text.</p> * * <p>See ISO 18004:2006, 6.4.3 - 6.4.7</p> * * @author [email protected] (Sean Owen) */ final class DecodedBitStreamParser { /** * See ISO 18004:2006, 6.4.4 Table 5 */ private static final char[] ALPHANUMERIC_CHARS = new char[]{ '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', ' ', '$', '%', '*', '+', '-', '.', '/', ':' }; private static final String SHIFT_JIS = "Shift_JIS"; private static final boolean ASSUME_SHIFT_JIS; private static final String UTF8 = "UTF-8"; private static final String ISO88591 = "ISO-8859-1"; static { String platformDefault = System.getProperty("file.encoding"); ASSUME_SHIFT_JIS = SHIFT_JIS.equalsIgnoreCase(platformDefault) || "EUC-JP".equalsIgnoreCase(platformDefault); } private DecodedBitStreamParser() { } static String decode(byte[] bytes, Version version) throws ReaderException { BitSource bits = new BitSource(bytes); StringBuffer result = new StringBuffer(); Mode mode; do { // While still another segment to read... mode = Mode.forBits(bits.readBits(4)); // mode is encoded by 4 bits if (!mode.equals(Mode.TERMINATOR)) { // How many characters will follow, encoded in this mode? 
int count = bits.readBits(mode.getCharacterCountBits(version)); if (mode.equals(Mode.NUMERIC)) { decodeNumericSegment(bits, result, count); } else if (mode.equals(Mode.ALPHANUMERIC)) { decodeAlphanumericSegment(bits, result, count); } else if (mode.equals(Mode.BYTE)) { decodeByteSegment(bits, result, count); } else if (mode.equals(Mode.KANJI)) { decodeKanjiSegment(bits, result, count); } else { throw new ReaderException("Unsupported mode indicator"); } } } while (!mode.equals(Mode.TERMINATOR)); // I thought it wasn't allowed to leave extra bytes after the terminator but it happens /* int bitsLeft = bits.available(); if (bitsLeft > 0) { if (bitsLeft > 6 || bits.readBits(bitsLeft) != 0) { throw new ReaderException("Excess bits or non-zero bits after terminator mode indicator"); } } */ return result.toString(); } private static void decodeKanjiSegment(BitSource bits, StringBuffer result, int count) throws ReaderException { // Each character will require 2 bytes. Read the characters as 2-byte pairs // and decode as Shift_JIS afterwards byte[] buffer = new byte[2 * count]; int offset = 0; while (count > 0) { // Each 13 bits encodes a 2-byte character int twoBytes = bits.readBits(13); int assembledTwoBytes = ((twoBytes / 0x0C0) << 8) | (twoBytes % 0x0C0); if (assembledTwoBytes < 0x01F00) { // In the 0x8140 to 0x9FFC range assembledTwoBytes += 0x08140; } else { // In the 0xE040 to 0xEBBF range assembledTwoBytes += 0x0C140; } buffer[offset] = (byte) (assembledTwoBytes >> 8); buffer[offset + 1] = (byte) assembledTwoBytes; offset += 2; count--; } // Shift_JIS may not be supported in some environments: try { result.append(new String(buffer, SHIFT_JIS)); } catch (UnsupportedEncodingException uee) { throw new ReaderException(SHIFT_JIS + " encoding is not supported on this device"); } } private static void decodeByteSegment(BitSource bits, StringBuffer result, int count) throws ReaderException { byte[] readBytes = new byte[count]; if (count << 3 > bits.available()) { throw new 
ReaderException("Count too large: " + count); } for (int i = 0; i < count; i++) { readBytes[i] = (byte) bits.readBits(8); } // The spec isn't clear on this mode; see // section 6.4.5: t does not say which encoding to assuming // upon decoding. I have seen ISO-8859-1 used as well as // Shift_JIS -- without anything like an ECI designator to // give a hint. String encoding = guessEncoding(readBytes); try { result.append(new String(readBytes, encoding)); } catch (UnsupportedEncodingException uce) { throw new ReaderException(uce.toString()); } } private static void decodeAlphanumericSegment(BitSource bits, StringBuffer result, int count) { // Read two characters at a time while (count > 1) { int nextTwoCharsBits = bits.readBits(11); result.append(ALPHANUMERIC_CHARS[nextTwoCharsBits / 45]); result.append(ALPHANUMERIC_CHARS[nextTwoCharsBits % 45]); count -= 2; } if (count == 1) { // special case: one character left result.append(ALPHANUMERIC_CHARS[bits.readBits(6)]); } } private static void decodeNumericSegment(BitSource bits, StringBuffer result, int count) throws ReaderException { // Read three digits at a time while (count >= 3) { // Each 10 bits encodes three digits int threeDigitsBits = bits.readBits(10); if (threeDigitsBits >= 1000) { throw new ReaderException("Illegal value for 3-digit unit: " + threeDigitsBits); } result.append(ALPHANUMERIC_CHARS[threeDigitsBits / 100]); result.append(ALPHANUMERIC_CHARS[(threeDigitsBits / 10) % 10]); result.append(ALPHANUMERIC_CHARS[threeDigitsBits % 10]); count -= 3; } if (count == 2) { // Two digits left over to read, encoded in 7 bits int twoDigitsBits = bits.readBits(7); if (twoDigitsBits >= 100) { throw new ReaderException("Illegal value for 2-digit unit: " + twoDigitsBits); } result.append(ALPHANUMERIC_CHARS[twoDigitsBits / 10]); result.append(ALPHANUMERIC_CHARS[twoDigitsBits % 10]); } else if (count == 1) { // One digit left over to read int digitBits = bits.readBits(4); if (digitBits >= 10) { throw new 
ReaderException("Illegal value for digit unit: " + digitBits); } result.append(ALPHANUMERIC_CHARS[digitBits]); } } private static String guessEncoding(byte[] bytes) { if (ASSUME_SHIFT_JIS) { return SHIFT_JIS; } // Does it start with the UTF-8 byte order mark? then guess it's UTF-8 if (bytes.length > 3 && bytes[0] == (byte) 0xEF && bytes[1] == (byte) 0xBB && bytes[2] == (byte) 0xBF) { return UTF8; } // For now, merely tries to distinguish ISO-8859-1, UTF-8 and Shift_JIS, // which should be by far the most common encodings. ISO-8859-1 // should not have bytes in the 0x80 - 0x9F range, while Shift_JIS // uses this as a first byte of a two-byte character. If we see this // followed by a valid second byte in Shift_JIS, assume it is Shift_JIS. // If we see something else in that second byte, we'll make the risky guess // that it's UTF-8. int length = bytes.length; + boolean canBeISO88591 = true; for (int i = 0; i < length; i++) { int value = bytes[i] & 0xFF; if (value >= 0x80 && value <= 0x9F && i < length - 1) { + canBeISO88591 = false; // ISO-8859-1 shouldn't use this, but before we decide it is Shift_JIS, // just double check that it is followed by a byte that's valid in // the Shift_JIS encoding int nextValue = bytes[i + 1] & 0xFF; if ((value & 0x1) == 0) { - // if even, - if (nextValue >= 0x9F && nextValue <= 0xFC) { - return SHIFT_JIS; + // if even, next value should be in [0x9F,0xFC] + // if not, we'll guess UTF-8 + if (nextValue < 0x9F || nextValue > 0xFC) { + return UTF8; } } else { - if (nextValue >= 0x40 && nextValue <= 0x9E) { - return SHIFT_JIS; + // if odd, next value should be in [0x40,0x9E] + // if not, we'll guess UTF-8 + if (nextValue < 0x40 || nextValue > 0x9E) { + return UTF8; } } - // otherwise we're going to take a guess that it's UTF-8 - return UTF8; } } - return ISO88591; + return canBeISO88591 ? ISO88591 : SHIFT_JIS; } }
false
true
private static String guessEncoding(byte[] bytes) { if (ASSUME_SHIFT_JIS) { return SHIFT_JIS; } // Does it start with the UTF-8 byte order mark? then guess it's UTF-8 if (bytes.length > 3 && bytes[0] == (byte) 0xEF && bytes[1] == (byte) 0xBB && bytes[2] == (byte) 0xBF) { return UTF8; } // For now, merely tries to distinguish ISO-8859-1, UTF-8 and Shift_JIS, // which should be by far the most common encodings. ISO-8859-1 // should not have bytes in the 0x80 - 0x9F range, while Shift_JIS // uses this as a first byte of a two-byte character. If we see this // followed by a valid second byte in Shift_JIS, assume it is Shift_JIS. // If we see something else in that second byte, we'll make the risky guess // that it's UTF-8. int length = bytes.length; for (int i = 0; i < length; i++) { int value = bytes[i] & 0xFF; if (value >= 0x80 && value <= 0x9F && i < length - 1) { // ISO-8859-1 shouldn't use this, but before we decide it is Shift_JIS, // just double check that it is followed by a byte that's valid in // the Shift_JIS encoding int nextValue = bytes[i + 1] & 0xFF; if ((value & 0x1) == 0) { // if even, if (nextValue >= 0x9F && nextValue <= 0xFC) { return SHIFT_JIS; } } else { if (nextValue >= 0x40 && nextValue <= 0x9E) { return SHIFT_JIS; } } // otherwise we're going to take a guess that it's UTF-8 return UTF8; } } return ISO88591; }
private static String guessEncoding(byte[] bytes) { if (ASSUME_SHIFT_JIS) { return SHIFT_JIS; } // Does it start with the UTF-8 byte order mark? then guess it's UTF-8 if (bytes.length > 3 && bytes[0] == (byte) 0xEF && bytes[1] == (byte) 0xBB && bytes[2] == (byte) 0xBF) { return UTF8; } // For now, merely tries to distinguish ISO-8859-1, UTF-8 and Shift_JIS, // which should be by far the most common encodings. ISO-8859-1 // should not have bytes in the 0x80 - 0x9F range, while Shift_JIS // uses this as a first byte of a two-byte character. If we see this // followed by a valid second byte in Shift_JIS, assume it is Shift_JIS. // If we see something else in that second byte, we'll make the risky guess // that it's UTF-8. int length = bytes.length; boolean canBeISO88591 = true; for (int i = 0; i < length; i++) { int value = bytes[i] & 0xFF; if (value >= 0x80 && value <= 0x9F && i < length - 1) { canBeISO88591 = false; // ISO-8859-1 shouldn't use this, but before we decide it is Shift_JIS, // just double check that it is followed by a byte that's valid in // the Shift_JIS encoding int nextValue = bytes[i + 1] & 0xFF; if ((value & 0x1) == 0) { // if even, next value should be in [0x9F,0xFC] // if not, we'll guess UTF-8 if (nextValue < 0x9F || nextValue > 0xFC) { return UTF8; } } else { // if odd, next value should be in [0x40,0x9E] // if not, we'll guess UTF-8 if (nextValue < 0x40 || nextValue > 0x9E) { return UTF8; } } } } return canBeISO88591 ? ISO88591 : SHIFT_JIS; }
diff --git a/wkingtechrts/src/com/wkingtechrts/mygdxgame/TechGestureListener.java b/wkingtechrts/src/com/wkingtechrts/mygdxgame/TechGestureListener.java index 238ee6e..69cec27 100644 --- a/wkingtechrts/src/com/wkingtechrts/mygdxgame/TechGestureListener.java +++ b/wkingtechrts/src/com/wkingtechrts/mygdxgame/TechGestureListener.java @@ -1,66 +1,66 @@ package com.wkingtechrts.mygdxgame; import com.badlogic.gdx.graphics.OrthographicCamera; import com.badlogic.gdx.input.GestureDetector.GestureListener; import com.badlogic.gdx.math.Vector2; import com.badlogic.gdx.Gdx; public class TechGestureListener implements GestureListener { private OrthographicCamera cam; public TechGestureListener(OrthographicCamera camera) { this.cam = camera; } @Override public boolean touchDown(float x, float y, int pointer, int button) { // TODO Auto-generated method stub return false; } @Override public boolean tap(float x, float y, int count, int button) { // TODO Auto-generated method stub return false; } @Override public boolean longPress(float x, float y) { // TODO Auto-generated method stub return false; } @Override public boolean fling(float velocityX, float velocityY, int button) { // TODO Auto-generated method stub return false; } @Override public boolean pan(float x, float y, float deltaX, float deltaY) { // TODO Auto-generated method stub cam.position.x += deltaX/(cam.viewportWidth * cam.zoom); - cam.position.y -= deltaY/(cam.viewportWidth * cam.zoom); + cam.position.y -= deltaY/(cam.viewportHeight * cam.zoom); System.out.println("("+x+","+y+") DeltaX:"+deltaX+","+deltaY); cam.update(); Gdx.graphics.requestRendering(); return false; } @Override public boolean zoom(float initialDistance, float distance) { // TODO Auto-generated method stub return false; } @Override public boolean pinch(Vector2 initialPointer1, Vector2 initialPointer2, Vector2 pointer1, Vector2 pointer2) { // TODO Auto-generated method stub return false; } }
true
true
public boolean pan(float x, float y, float deltaX, float deltaY) { // TODO Auto-generated method stub cam.position.x += deltaX/(cam.viewportWidth * cam.zoom); cam.position.y -= deltaY/(cam.viewportWidth * cam.zoom); System.out.println("("+x+","+y+") DeltaX:"+deltaX+","+deltaY); cam.update(); Gdx.graphics.requestRendering(); return false; }
public boolean pan(float x, float y, float deltaX, float deltaY) { // TODO Auto-generated method stub cam.position.x += deltaX/(cam.viewportWidth * cam.zoom); cam.position.y -= deltaY/(cam.viewportHeight * cam.zoom); System.out.println("("+x+","+y+") DeltaX:"+deltaX+","+deltaY); cam.update(); Gdx.graphics.requestRendering(); return false; }
diff --git a/AMBroSIA/src/game/Logic.java b/AMBroSIA/src/game/Logic.java index 195fc72..7c60167 100644 --- a/AMBroSIA/src/game/Logic.java +++ b/AMBroSIA/src/game/Logic.java @@ -1,304 +1,306 @@ package game; import gui.MenuGUI; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.KeyAdapter; import java.awt.event.KeyEvent; import java.awt.event.KeyListener; import java.util.Random; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import org.apache.log4j.BasicConfigurator; import org.apache.log4j.Level; import org.apache.log4j.Logger; /** * The purpose of the * <code>Logic</code> class is to manage all other classes and methods in such a * way as to produce a playable game. This includes (but is not limited to) * calling for the creation of the main menu, displaying the leaderboard or * starting the game in either single or two player mode as appropriate, * depending on player actions. * * @author Nikolaos, Michael, Anthony * */ public class Logic extends KeyAdapter implements ActionListener { /** * Value of maximum level. 
*/ final public static int MAX_LEVEL = 30; //various essential objects private static GameState gameState; private static ActionListener buttonPress = new Logic(); private static KeyListener keyboard = new Logic(); private static MenuGUI gui; private static GraphicsEngine graphicsEngine; private static Physics physicsEngine; private static timeToLive ttlLogic; private static Collision collisionCheck; private static Progression gameProgress; private static AI gameAI; //fire rate limiter variables private static long initialShootTime; private static long currentShootTime; private static boolean shootKeyReleased = true; private static int shootCounter = 0; //is game paused boolean private boolean paused = false; //the service used to execute all update functions private static ScheduledExecutorService timer; //logger, global logging level private final static Logger log = Logger.getLogger(Logic.class.getName()); /** * Logging level control - from no logging output to full logging. */ public final static Level LOG_LEVEL = Level.OFF; /** * Main method; creates the main menu and acts as appropriate depending on * player input. * * @param args */ public static void main(String args[]) { GameAssets.loadSounds(); //set log configuration to defaults BasicConfigurator.configure(); //create, display gui gui = new MenuGUI(buttonPress, keyboard); gui.showMenu(); log.setLevel(LOG_LEVEL); log.info("GUI has been started"); //background music - different exception handles for jdk6 compatibility GameAssets.theme.playLoop(); } /** * Start the global timer responsible for keeping all game elements up to * date. The timer will use some form of multithreading to execute update * tasks concurrently. 
*/ public static void startTimer() { timer = Executors.newScheduledThreadPool(4); timer.scheduleAtFixedRate(graphicsEngine, 0, 17, TimeUnit.MILLISECONDS); timer.scheduleAtFixedRate(physicsEngine, 0, 17, TimeUnit.MILLISECONDS); timer.scheduleAtFixedRate(collisionCheck, 0, 17, TimeUnit.MILLISECONDS); timer.scheduleAtFixedRate(gui, 0, 17, TimeUnit.MILLISECONDS); timer.scheduleAtFixedRate(ttlLogic, 0, 200, TimeUnit.MILLISECONDS); timer.scheduleAtFixedRate(gameProgress, 0, 1, TimeUnit.SECONDS); timer.scheduleAtFixedRate(gameAI, 0, 500, TimeUnit.MILLISECONDS); } /** * Stops the timer. */ public static void stopTimer() { timer.shutdown(); } /** * ? * * @param command * @param delay * @param unit */ public static void executeTask(Runnable command, long delay, TimeUnit unit) { timer.schedule(command, delay, unit); } /** * Start the single player game. */ public static void startSinglePlayer() { GameAssets.theme.stop(); setUpLevel(false); } /** * Starts the game in 2 player mode. */ public static void startTwoPlayer() { GameAssets.theme.stop(); setUpLevel(true); } /** * Shows tutorial information to the player. */ public static void showTutorial() { } /** * Checks if the game is paused. * * @return true if game is paused, false otherwise */ public boolean isPaused() { return paused; } /** * Displays text that the player has won (as appropriate for single or 2 * player mode). */ public static void displayWinner() { } /** * Displays "Game Over" message. */ public static void displayGameOver() { gui.displayGameOver(gameState); } /** * Display information relevant to player two's turn. 
*/ public static void displayPlayerTwoTurn() { } //set up some game essentials private static void setUpLevel(boolean twoPlayer) { gameState = new GameState(); graphicsEngine = new GraphicsEngine(gameState); physicsEngine = new Physics(gameState); ttlLogic = new timeToLive(gameState); collisionCheck = new Collision(gameState, physicsEngine); gameProgress = new Progression(gameState,twoPlayer); gameAI = new AI(gameState); gameProgress.setupInitialLevel(); } //called whenever a key is pressed (thread seperate from timer) /** * Handles event caused by user key presses. * * @param e */ @Override public void keyPressed(KeyEvent e) { int keyCode = e.getKeyCode(); PlayerShip player = gameState.getPlayerShip(); //handles most basic key commands. Should activate a boolean stating that the key has been pressed if (keyCode == KeyEvent.VK_UP) { //accelerate if (!paused && player != null) { player.accelerate(true); GameAssets.thrusters.playLoop(); } } else if (keyCode == KeyEvent.VK_LEFT) { if (!paused && player != null) { player.turnLeft(true); } } else if (keyCode == KeyEvent.VK_RIGHT) { if (!paused && player != null) { player.turnRight(true); } } else if (keyCode == KeyEvent.VK_DOWN) { if (!paused && player != null) { player.useBomb(); } } else if (keyCode == KeyEvent.VK_SPACE) { - if (shootKeyReleased) { - initialShootTime = System.currentTimeMillis(); - shootKeyReleased = false; - shootCounter = 0; - player.shoot(); - } else if (!shootKeyReleased) { - currentShootTime = System.currentTimeMillis(); - while ((currentShootTime - initialShootTime) > PlayerShip.FIRE_RATE * 1200 && shootCounter < 1) { - player.shootDirection(); - shootCounter++; - initialShootTime = currentShootTime; + if(gameState.getPlayerShip() != null){ + if (shootKeyReleased) { + initialShootTime = System.currentTimeMillis(); + shootKeyReleased = false; + shootCounter = 0; + player.shoot(); + } else if (!shootKeyReleased) { + currentShootTime = System.currentTimeMillis(); + while ((currentShootTime - 
initialShootTime) > PlayerShip.FIRE_RATE * 1200 && shootCounter < 1) { + player.shootDirection(); + shootCounter++; + initialShootTime = currentShootTime; + } } } } else if (keyCode == KeyEvent.VK_P) { if (!paused) { stopTimer(); paused = true; } else { startTimer(); paused = false; } } else if (keyCode == KeyEvent.VK_B) { if (!paused && player != null) { player.useBomb(); } } if (keyCode == KeyEvent.VK_Z) { Random randu = new Random(); gameState.addAsteroid(new Asteroid(new float[]{1.5f, 1.5f}, randu.nextInt(360), new int[]{randu.nextInt(700), randu.nextInt(500)}, gameState, Asteroid.LARGE_ASTEROID_SIZE)); //gameState.addAsteroid(new Asteroid(new float[]{Difficulty.randomAsteroidVelocity(10), Difficulty.randomHeading()}, randu.nextInt(360), new int[]{randu.nextInt(700), randu.nextInt(500)}, gameState, Asteroid.LARGE_ASTEROID_SIZE)); //gameState.addProjectile(new Projectile(gameState.getAlienShip(), randu.nextInt(360), new int[] {gameState.getAlienShip().getX(), gameState.getAlienShip().getY()}, gameState)); } } //same as keyPressed, except when it is released /** * Handle events caused by release of key. * * @param e */ @Override public void keyReleased(KeyEvent e) { int keyCode = e.getKeyCode(); PlayerShip player = gameState.getPlayerShip(); //stops doing whatever that keypress was doing if (keyCode == KeyEvent.VK_UP) { //accelerate if (player != null) { player.accelerate(false); GameAssets.thrusters.stop(); } } else if (keyCode == KeyEvent.VK_LEFT) { if (player != null) { player.turnLeft(false); } } else if (keyCode == KeyEvent.VK_RIGHT) { if (player != null) { player.turnRight(false); } } else if (keyCode == KeyEvent.VK_SPACE) { shootKeyReleased = true; } } //This section needs a LOT of work.... //called when a gui button is clicked /** * Handles events relating to the user clicking menu buttons. 
* * @param e */ @Override public void actionPerformed(ActionEvent e) { Object action = e.getSource(); if (action == gui.singlePbutton) { startSinglePlayer(); gui.displaySingleP(gameState); startTimer(); } else if (action == gui.twoPbutton) { startTwoPlayer(); gui.displayTwoP(gameState); startTimer(); } else if (action == gui.leaderBoardButton) { gui.displayLeaderBoard(); } else if (action == gui.tutorialButton) { gui.displayTutorial(); } else if (action == gui.backButton) { gui.goBack(); } else if (e.getSource() == gui.quitButton) { System.exit(0); } } }
true
true
public void keyPressed(KeyEvent e) { int keyCode = e.getKeyCode(); PlayerShip player = gameState.getPlayerShip(); //handles most basic key commands. Should activate a boolean stating that the key has been pressed if (keyCode == KeyEvent.VK_UP) { //accelerate if (!paused && player != null) { player.accelerate(true); GameAssets.thrusters.playLoop(); } } else if (keyCode == KeyEvent.VK_LEFT) { if (!paused && player != null) { player.turnLeft(true); } } else if (keyCode == KeyEvent.VK_RIGHT) { if (!paused && player != null) { player.turnRight(true); } } else if (keyCode == KeyEvent.VK_DOWN) { if (!paused && player != null) { player.useBomb(); } } else if (keyCode == KeyEvent.VK_SPACE) { if (shootKeyReleased) { initialShootTime = System.currentTimeMillis(); shootKeyReleased = false; shootCounter = 0; player.shoot(); } else if (!shootKeyReleased) { currentShootTime = System.currentTimeMillis(); while ((currentShootTime - initialShootTime) > PlayerShip.FIRE_RATE * 1200 && shootCounter < 1) { player.shootDirection(); shootCounter++; initialShootTime = currentShootTime; } } } else if (keyCode == KeyEvent.VK_P) { if (!paused) { stopTimer(); paused = true; } else { startTimer(); paused = false; } } else if (keyCode == KeyEvent.VK_B) { if (!paused && player != null) { player.useBomb(); } } if (keyCode == KeyEvent.VK_Z) { Random randu = new Random(); gameState.addAsteroid(new Asteroid(new float[]{1.5f, 1.5f}, randu.nextInt(360), new int[]{randu.nextInt(700), randu.nextInt(500)}, gameState, Asteroid.LARGE_ASTEROID_SIZE)); //gameState.addAsteroid(new Asteroid(new float[]{Difficulty.randomAsteroidVelocity(10), Difficulty.randomHeading()}, randu.nextInt(360), new int[]{randu.nextInt(700), randu.nextInt(500)}, gameState, Asteroid.LARGE_ASTEROID_SIZE)); //gameState.addProjectile(new Projectile(gameState.getAlienShip(), randu.nextInt(360), new int[] {gameState.getAlienShip().getX(), gameState.getAlienShip().getY()}, gameState)); } }
public void keyPressed(KeyEvent e) { int keyCode = e.getKeyCode(); PlayerShip player = gameState.getPlayerShip(); //handles most basic key commands. Should activate a boolean stating that the key has been pressed if (keyCode == KeyEvent.VK_UP) { //accelerate if (!paused && player != null) { player.accelerate(true); GameAssets.thrusters.playLoop(); } } else if (keyCode == KeyEvent.VK_LEFT) { if (!paused && player != null) { player.turnLeft(true); } } else if (keyCode == KeyEvent.VK_RIGHT) { if (!paused && player != null) { player.turnRight(true); } } else if (keyCode == KeyEvent.VK_DOWN) { if (!paused && player != null) { player.useBomb(); } } else if (keyCode == KeyEvent.VK_SPACE) { if(gameState.getPlayerShip() != null){ if (shootKeyReleased) { initialShootTime = System.currentTimeMillis(); shootKeyReleased = false; shootCounter = 0; player.shoot(); } else if (!shootKeyReleased) { currentShootTime = System.currentTimeMillis(); while ((currentShootTime - initialShootTime) > PlayerShip.FIRE_RATE * 1200 && shootCounter < 1) { player.shootDirection(); shootCounter++; initialShootTime = currentShootTime; } } } } else if (keyCode == KeyEvent.VK_P) { if (!paused) { stopTimer(); paused = true; } else { startTimer(); paused = false; } } else if (keyCode == KeyEvent.VK_B) { if (!paused && player != null) { player.useBomb(); } } if (keyCode == KeyEvent.VK_Z) { Random randu = new Random(); gameState.addAsteroid(new Asteroid(new float[]{1.5f, 1.5f}, randu.nextInt(360), new int[]{randu.nextInt(700), randu.nextInt(500)}, gameState, Asteroid.LARGE_ASTEROID_SIZE)); //gameState.addAsteroid(new Asteroid(new float[]{Difficulty.randomAsteroidVelocity(10), Difficulty.randomHeading()}, randu.nextInt(360), new int[]{randu.nextInt(700), randu.nextInt(500)}, gameState, Asteroid.LARGE_ASTEROID_SIZE)); //gameState.addProjectile(new Projectile(gameState.getAlienShip(), randu.nextInt(360), new int[] {gameState.getAlienShip().getX(), gameState.getAlienShip().getY()}, gameState)); } }
diff --git a/tests/org.eclipse.dltk.ruby.ui.tests/src/org/eclipse/dltk/ruby/ui/tests/text/indenting/IndentingTest.java b/tests/org.eclipse.dltk.ruby.ui.tests/src/org/eclipse/dltk/ruby/ui/tests/text/indenting/IndentingTest.java index 4755a80d..fc0b8ec2 100644 --- a/tests/org.eclipse.dltk.ruby.ui.tests/src/org/eclipse/dltk/ruby/ui/tests/text/indenting/IndentingTest.java +++ b/tests/org.eclipse.dltk.ruby.ui.tests/src/org/eclipse/dltk/ruby/ui/tests/text/indenting/IndentingTest.java @@ -1,183 +1,183 @@ /******************************************************************************* * Copyright (c) 2005, 2007 IBM Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * *******************************************************************************/ package org.eclipse.dltk.ruby.ui.tests.text.indenting; import org.eclipse.core.runtime.Assert; import org.eclipse.dltk.core.tests.model.SuiteOfTestCases; import org.eclipse.dltk.ruby.internal.ui.RubyPreferenceConstants; import org.eclipse.dltk.ruby.internal.ui.text.RubyAutoEditStrategy; import org.eclipse.dltk.ruby.internal.ui.text.RubyPartitions; import org.eclipse.dltk.ruby.ui.tests.internal.TestUtils; import org.eclipse.dltk.ui.CodeFormatterConstants; import org.eclipse.jface.preference.PreferenceStore; import org.eclipse.jface.text.DocCmd; import org.eclipse.jface.text.Document; import org.eclipse.jface.text.TextUtilities; public class IndentingTest extends SuiteOfTestCases { public IndentingTest(String name) { super(name); } private static final String PATH = "resources/indenting/"; private RubyAutoEditStrategy tabStrategy, spaceStrategy; protected void setUp() throws Exception { tabStrategy = createStrategy(true); spaceStrategy = createStrategy(false); super.setUp(); } private RubyAutoEditStrategy 
createStrategy(boolean useTabs) { PreferenceStore store = new PreferenceStore(); store.setValue(CodeFormatterConstants.FORMATTER_TAB_CHAR, (useTabs ? CodeFormatterConstants.TAB : CodeFormatterConstants.SPACE)); RubyPreferenceConstants.initializeDefaultValues(store); String partitioning = RubyPartitions.RUBY_PARTITIONING; RubyAutoEditStrategy result = new RubyAutoEditStrategy(store, partitioning); return result; } public void doTest(String data, RubyAutoEditStrategy strategy) throws Exception { data = data.replaceAll("π", "≤\n≥"); int startPos = data.indexOf("≤"); Assert.isLegal(startPos >= 0); data = data.substring(0, startPos) + data.substring(startPos + 1); - int replacePos = data.indexOf('±'); + int replacePos = data.indexOf("±"); int insertionStartPos = startPos; if (replacePos >= 0) { Assert.isLegal(replacePos >= startPos); data = data.substring(0, replacePos) + data.substring(replacePos + 1); insertionStartPos = replacePos; } int endPos = data.indexOf("≥"); Assert.isLegal(endPos >= 0); Assert.isLegal(replacePos < 0 || endPos >= replacePos); String insertion = data.substring(insertionStartPos, endPos); data = data.substring(0, insertionStartPos) + data.substring(endPos + 1); int expectedPos = data.indexOf("§§"); Assert.isLegal(expectedPos >= 0); String expected = data.substring(expectedPos + 2); data = data.substring(0, expectedPos); Document doc = new Document(data); TestUtils.installStuff(doc); // remove the leading line break from expected String[] legalLineDelimiters = doc.getLegalLineDelimiters(); int index = TextUtilities.startsWith(legalLineDelimiters, expected); Assert.isLegal(index >= 0); expected = expected.substring(legalLineDelimiters[index].length()); int replaceLength = (replacePos < 0 ? 
0 : replacePos - startPos); DocCmd cmd = new DocCmd(startPos, replaceLength, insertion); strategy.customizeDocumentCommand(doc, cmd); if (cmd.doit) { // for (Iterator iter = cmd.getCommandIterator(); iter.hasNext(); ) { // Object command = iter.next(); // Method method = command.getClass().getMethod("execute", new Class[] {IDocument.class}); // method.invoke(command, new Object[] {doc}); // } doc.replace(cmd.offset, cmd.length, cmd.text); } assertEquals(expected, doc.get()); } private void magic() throws Exception { String name = getName(); String fileName = name.substring(4, 5).toLowerCase() + name.substring(5) + ".rb"; String data = TestUtils.getData(PATH + fileName); String moreData = data.replaceAll("\t", " "); if (!moreData.equals(data)) doTest(moreData, spaceStrategy); doTest(data, tabStrategy); } public void testNewLineInDef() throws Exception { magic(); } public void testEnterBeforeClass() throws Exception { magic(); } public void testEnterOpensClass() throws Exception { magic(); } public void testIfStatement() throws Exception { magic(); } public void testIfModifier() throws Exception { magic(); } public void testMovingEndToNewLine() throws Exception { magic(); } public void testMovingEndWithWhitespaceToNewLine() throws Exception { magic(); } public void testDeindentingEnd() throws Exception { magic(); } public void testClassNotKeyword() throws Exception { magic(); } public void testNewLineAfterEmptyIndentedLine() throws Exception { magic(); } public void testNewLineInRegularFunction() throws Exception { magic(); } public void testIndentAfterNewLineBeforeParen() throws Exception { magic(); } public void testIndentOnUnclosedParen() throws Exception { magic(); } public void testIndentOnFirstExplicitContinuation() throws Exception { magic(); } public void testIndentOnFirstImplicitContinuation() throws Exception { magic(); } public void testNoIndentOnSubsequentExplicitContinuation() throws Exception { magic(); } public void 
testNoIndentOnSubsequentImplicitContinuationAfterExplicitOne() throws Exception { magic(); } public void testNoIndentOnSubsequentImplicitContinuationAfterImplicitOne() throws Exception { magic(); } }
true
true
public void doTest(String data, RubyAutoEditStrategy strategy) throws Exception { data = data.replaceAll("π", "≤\n≥"); int startPos = data.indexOf("≤"); Assert.isLegal(startPos >= 0); data = data.substring(0, startPos) + data.substring(startPos + 1); int replacePos = data.indexOf('±'); int insertionStartPos = startPos; if (replacePos >= 0) { Assert.isLegal(replacePos >= startPos); data = data.substring(0, replacePos) + data.substring(replacePos + 1); insertionStartPos = replacePos; } int endPos = data.indexOf("≥"); Assert.isLegal(endPos >= 0); Assert.isLegal(replacePos < 0 || endPos >= replacePos); String insertion = data.substring(insertionStartPos, endPos); data = data.substring(0, insertionStartPos) + data.substring(endPos + 1); int expectedPos = data.indexOf("§§"); Assert.isLegal(expectedPos >= 0); String expected = data.substring(expectedPos + 2); data = data.substring(0, expectedPos); Document doc = new Document(data); TestUtils.installStuff(doc); // remove the leading line break from expected String[] legalLineDelimiters = doc.getLegalLineDelimiters(); int index = TextUtilities.startsWith(legalLineDelimiters, expected); Assert.isLegal(index >= 0); expected = expected.substring(legalLineDelimiters[index].length()); int replaceLength = (replacePos < 0 ? 0 : replacePos - startPos); DocCmd cmd = new DocCmd(startPos, replaceLength, insertion); strategy.customizeDocumentCommand(doc, cmd); if (cmd.doit) { // for (Iterator iter = cmd.getCommandIterator(); iter.hasNext(); ) { // Object command = iter.next(); // Method method = command.getClass().getMethod("execute", new Class[] {IDocument.class}); // method.invoke(command, new Object[] {doc}); // } doc.replace(cmd.offset, cmd.length, cmd.text); } assertEquals(expected, doc.get()); }
public void doTest(String data, RubyAutoEditStrategy strategy) throws Exception { data = data.replaceAll("π", "≤\n≥"); int startPos = data.indexOf("≤"); Assert.isLegal(startPos >= 0); data = data.substring(0, startPos) + data.substring(startPos + 1); int replacePos = data.indexOf("±"); int insertionStartPos = startPos; if (replacePos >= 0) { Assert.isLegal(replacePos >= startPos); data = data.substring(0, replacePos) + data.substring(replacePos + 1); insertionStartPos = replacePos; } int endPos = data.indexOf("≥"); Assert.isLegal(endPos >= 0); Assert.isLegal(replacePos < 0 || endPos >= replacePos); String insertion = data.substring(insertionStartPos, endPos); data = data.substring(0, insertionStartPos) + data.substring(endPos + 1); int expectedPos = data.indexOf("§§"); Assert.isLegal(expectedPos >= 0); String expected = data.substring(expectedPos + 2); data = data.substring(0, expectedPos); Document doc = new Document(data); TestUtils.installStuff(doc); // remove the leading line break from expected String[] legalLineDelimiters = doc.getLegalLineDelimiters(); int index = TextUtilities.startsWith(legalLineDelimiters, expected); Assert.isLegal(index >= 0); expected = expected.substring(legalLineDelimiters[index].length()); int replaceLength = (replacePos < 0 ? 0 : replacePos - startPos); DocCmd cmd = new DocCmd(startPos, replaceLength, insertion); strategy.customizeDocumentCommand(doc, cmd); if (cmd.doit) { // for (Iterator iter = cmd.getCommandIterator(); iter.hasNext(); ) { // Object command = iter.next(); // Method method = command.getClass().getMethod("execute", new Class[] {IDocument.class}); // method.invoke(command, new Object[] {doc}); // } doc.replace(cmd.offset, cmd.length, cmd.text); } assertEquals(expected, doc.get()); }
diff --git a/plugins/org.fornax.soa.basedsl/src/org/fornax/soa/basedsl/scoping/versions/VersionComparator.java b/plugins/org.fornax.soa.basedsl/src/org/fornax/soa/basedsl/scoping/versions/VersionComparator.java index de690127..f8520d7b 100644 --- a/plugins/org.fornax.soa.basedsl/src/org/fornax/soa/basedsl/scoping/versions/VersionComparator.java +++ b/plugins/org.fornax.soa.basedsl/src/org/fornax/soa/basedsl/scoping/versions/VersionComparator.java @@ -1,85 +1,85 @@ package org.fornax.soa.basedsl.scoping.versions; import org.eclipse.xtext.resource.IEObjectDescription; public class VersionComparator { public static int compare (final String v1, final String v2) { if (v1!=null && v2 == null) return 1; if (v1==null && v2 != null) return -1; if (v1==null && v2==null) return 0; if (v1.equals(v2)) { return 0; } String[] v1Parts = v1.split("\\."); String[] v2Parts = v2.split("\\."); String ver1 = v1; String ver2 = v2; if (v1Parts.length == v2Parts.length + 1) { ver2 = v2 + ".0"; } if (v2Parts.length == v1Parts.length + 1) { ver1 = v1 + ".0"; } String[] ver1Parts = ver1.split("\\."); String[] ver2Parts = ver2.split("\\."); for (int i = 0; i < ver1Parts.length; i++) { if (i < ver2Parts.length) { int classifierCmp = VersionClassifierComparator.compare (ver1Parts[i], ver2Parts[i]); if (classifierCmp != 0) return classifierCmp; int cmp = ver1Parts[i].compareTo(ver2Parts[i]); - if (cmp != 0 && Character.isDigit (ver1Parts[i].charAt (0)) && Character.isDigit (ver2Parts[i].charAt (0))) { + if (cmp != 0 && ver1Parts[i].length () > 0 && Character.isDigit (ver1Parts[i].charAt (0)) && ver2Parts[i].length () > 0 && Character.isDigit (ver2Parts[i].charAt (0))) { return cmp; } - if (cmp != 0 && Character.isDigit (ver1Parts[i].charAt (0)) && !Character.isDigit (ver2Parts[i].charAt (0))) { + if (cmp != 0 && ver1Parts[i].length () > 0 && Character.isDigit (ver1Parts[i].charAt (0)) && ver2Parts[i].length () > 0 && !Character.isDigit (ver2Parts[i].charAt (0))) { if (ver1Parts[i].compareTo 
("0") > 0) return 1; else return 0; } - if (cmp != 0 && !Character.isDigit (ver1Parts[i].charAt (0)) && Character.isDigit (ver2Parts[i].charAt (0))) { + if (cmp != 0 && ver1Parts[i].length () > 0 && !Character.isDigit (ver1Parts[i].charAt (0)) && ver2Parts[i].length () > 0 && Character.isDigit (ver2Parts[i].charAt (0))) { if (ver2Parts[i].compareTo ("0") > 0) return 1; else return 0; } - if (cmp != 0 && !Character.isDigit (ver1Parts[i].charAt (0)) && !Character.isDigit (ver2Parts[i].charAt (0))) { + if (cmp != 0 && ver1Parts[i].length () > 0 && !Character.isDigit (ver1Parts[i].charAt (0)) && ver2Parts[i].length () > 0 && !Character.isDigit (ver2Parts[i].charAt (0))) { return VersionClassifierComparator.compare (ver1Parts[i], ver2Parts[i]); } } } if (v1Parts.length == v2Parts.length + 1 || v2Parts.length == v1Parts.length + 1) return 0; if (v1Parts.length > v2Parts.length) { return 1; } return -1; } public static int compare (IEObjectDescription eObjDesc1, IEObjectDescription eObjDesc2, VersionResolver resolver) { if (eObjDesc1 == null && eObjDesc2 != null) return -1; if (eObjDesc1 != null && eObjDesc2 == null) return 1; return compare (resolver.getVersion(eObjDesc1), resolver.getVersion(eObjDesc2)); } public static int compare (final Integer v1, final Integer v2) { return compare(v1.toString(), v2.toString()); } public static int compare (final String v1, final Integer v2) { return compare(v1.toString(), v2.toString()); } public static int compare (final Integer v1, final String v2) { return compare(v1.toString(), v2.toString()); } }
false
true
public static int compare (final String v1, final String v2) { if (v1!=null && v2 == null) return 1; if (v1==null && v2 != null) return -1; if (v1==null && v2==null) return 0; if (v1.equals(v2)) { return 0; } String[] v1Parts = v1.split("\\."); String[] v2Parts = v2.split("\\."); String ver1 = v1; String ver2 = v2; if (v1Parts.length == v2Parts.length + 1) { ver2 = v2 + ".0"; } if (v2Parts.length == v1Parts.length + 1) { ver1 = v1 + ".0"; } String[] ver1Parts = ver1.split("\\."); String[] ver2Parts = ver2.split("\\."); for (int i = 0; i < ver1Parts.length; i++) { if (i < ver2Parts.length) { int classifierCmp = VersionClassifierComparator.compare (ver1Parts[i], ver2Parts[i]); if (classifierCmp != 0) return classifierCmp; int cmp = ver1Parts[i].compareTo(ver2Parts[i]); if (cmp != 0 && Character.isDigit (ver1Parts[i].charAt (0)) && Character.isDigit (ver2Parts[i].charAt (0))) { return cmp; } if (cmp != 0 && Character.isDigit (ver1Parts[i].charAt (0)) && !Character.isDigit (ver2Parts[i].charAt (0))) { if (ver1Parts[i].compareTo ("0") > 0) return 1; else return 0; } if (cmp != 0 && !Character.isDigit (ver1Parts[i].charAt (0)) && Character.isDigit (ver2Parts[i].charAt (0))) { if (ver2Parts[i].compareTo ("0") > 0) return 1; else return 0; } if (cmp != 0 && !Character.isDigit (ver1Parts[i].charAt (0)) && !Character.isDigit (ver2Parts[i].charAt (0))) { return VersionClassifierComparator.compare (ver1Parts[i], ver2Parts[i]); } } } if (v1Parts.length == v2Parts.length + 1 || v2Parts.length == v1Parts.length + 1) return 0; if (v1Parts.length > v2Parts.length) { return 1; } return -1; }
public static int compare (final String v1, final String v2) { if (v1!=null && v2 == null) return 1; if (v1==null && v2 != null) return -1; if (v1==null && v2==null) return 0; if (v1.equals(v2)) { return 0; } String[] v1Parts = v1.split("\\."); String[] v2Parts = v2.split("\\."); String ver1 = v1; String ver2 = v2; if (v1Parts.length == v2Parts.length + 1) { ver2 = v2 + ".0"; } if (v2Parts.length == v1Parts.length + 1) { ver1 = v1 + ".0"; } String[] ver1Parts = ver1.split("\\."); String[] ver2Parts = ver2.split("\\."); for (int i = 0; i < ver1Parts.length; i++) { if (i < ver2Parts.length) { int classifierCmp = VersionClassifierComparator.compare (ver1Parts[i], ver2Parts[i]); if (classifierCmp != 0) return classifierCmp; int cmp = ver1Parts[i].compareTo(ver2Parts[i]); if (cmp != 0 && ver1Parts[i].length () > 0 && Character.isDigit (ver1Parts[i].charAt (0)) && ver2Parts[i].length () > 0 && Character.isDigit (ver2Parts[i].charAt (0))) { return cmp; } if (cmp != 0 && ver1Parts[i].length () > 0 && Character.isDigit (ver1Parts[i].charAt (0)) && ver2Parts[i].length () > 0 && !Character.isDigit (ver2Parts[i].charAt (0))) { if (ver1Parts[i].compareTo ("0") > 0) return 1; else return 0; } if (cmp != 0 && ver1Parts[i].length () > 0 && !Character.isDigit (ver1Parts[i].charAt (0)) && ver2Parts[i].length () > 0 && Character.isDigit (ver2Parts[i].charAt (0))) { if (ver2Parts[i].compareTo ("0") > 0) return 1; else return 0; } if (cmp != 0 && ver1Parts[i].length () > 0 && !Character.isDigit (ver1Parts[i].charAt (0)) && ver2Parts[i].length () > 0 && !Character.isDigit (ver2Parts[i].charAt (0))) { return VersionClassifierComparator.compare (ver1Parts[i], ver2Parts[i]); } } } if (v1Parts.length == v2Parts.length + 1 || v2Parts.length == v1Parts.length + 1) return 0; if (v1Parts.length > v2Parts.length) { return 1; } return -1; }
diff --git a/projects/security-manager/source/java/com/google/enterprise/connector/security/ui/BasicOmniFormCustomization.java b/projects/security-manager/source/java/com/google/enterprise/connector/security/ui/BasicOmniFormCustomization.java index 7ac06c64..0719febf 100644 --- a/projects/security-manager/source/java/com/google/enterprise/connector/security/ui/BasicOmniFormCustomization.java +++ b/projects/security-manager/source/java/com/google/enterprise/connector/security/ui/BasicOmniFormCustomization.java @@ -1,294 +1,294 @@ // Copyright 2009 Google Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.enterprise.connector.security.ui; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.logging.Logger; import java.util.logging.Level; /** * A simple implementation of OmniFormCustomization that can store/read * its configuration to/from a file. */ public class BasicOmniFormCustomization implements OmniFormCustomization { Logger LOGGER = Logger.getLogger(BasicOmniFormCustomization.class.getName()); /** The name of the serialized Properties file. 
*/ private String fileName; /** * The global configuration data corresponding to the * FormGlobalOption data. */ private Map<FormGlobalOption, String> globals = new HashMap<FormGlobalOption, String>(); /** * The pre-credential configuration data corresponding to the * PerCredentialOption data. */ private Map<String, HashMap<PerCredentialOption, String>> groups = new HashMap<String, HashMap<PerCredentialOption, String>>(); /** * Constructor that takes a file name as input and reads the configuration * from that file. Note: the way this is currently written is not atomic - * it may try to read the configuration and encounter some error halfway * through and the internal Map objects may contain only half the * configuration. */ public BasicOmniFormCustomization(String fileName) { this.fileName = fileName; readConfig(); } /** Default constructor for testing purposes. */ BasicOmniFormCustomization() { } /** * Returns a copy of the credential options for the specified credential * group if those options exist, or an empty Map otherwise. */ public Map<PerCredentialOption, String> getCredentialGroupOptions(String credentialGroupName) { if (!groups.containsKey(credentialGroupName)) { return Collections.emptyMap(); } HashMap<PerCredentialOption, String> mapCopy = new HashMap<PerCredentialOption, String>(); mapCopy.putAll(groups.get(credentialGroupName)); return mapCopy; } /** * Returns the global options for the OmniForm customization. */ public Map<FormGlobalOption, String> getGlobalOptions() { HashMap<FormGlobalOption, String> mapCopy = new HashMap<FormGlobalOption, String>(); mapCopy.putAll(globals); return mapCopy; } /** * Sets the global options for the OmniForm customization. */ public List<OptionValidationError> setGlobalOptions(Map<FormGlobalOption, String> options) { globals.clear(); globals.putAll(options); // TODO(martincochran): there is no error-checking or input validation. This should be // corrected after the demo. 
return Collections.emptyList(); } /** * Sets the credential options for the given credential group. */ public List<OptionValidationError> setCredentialGroupOptions(String credentialGroupName, Map<PerCredentialOption, String> options) { if (!groups.containsKey(credentialGroupName)) { groups.put(credentialGroupName, new HashMap<PerCredentialOption, String>()); } groups.get(credentialGroupName).clear(); groups.get(credentialGroupName).putAll(options); // TODO(martincochran): there is no error-checking or input validation. This // might already be done by the consumer of this class, but it's an issue worth // revisiting in depth. return Collections.emptyList(); } public Set<String> getCredentialGroups() { return groups.keySet(); } /** * Reads and parses the config from the config file. */ public void readConfig(InputStream is) { Properties config = new Properties(); try { config.load(is); parsePropertiesConfig(config); } catch (IOException e) { LOGGER.log(Level.SEVERE, "Could not read config", e); } catch (IllegalArgumentException e) { LOGGER.log(Level.SEVERE, "Could not parse config", e); } } /** * Read config from file. */ public void readConfig() { if (fileName == null) { LOGGER.log(Level.WARNING, "Attempted to load config when filename is null"); return; } try { FileInputStream fis = new FileInputStream(fileName); readConfig(fis); } catch (FileNotFoundException e) { LOGGER.log(Level.SEVERE, "Could not find file", e); } } public void saveConfig(OutputStream os) { Properties config = getProperties(); try { config.store(os, "OmniForm Customization Options"); } catch (IOException e) { LOGGER.log(Level.SEVERE, "Could not save config file", e); } } public void saveConfigToFile(String fileName) { try { saveConfig(new FileOutputStream(fileName)); } catch (FileNotFoundException e) { LOGGER.log(Level.SEVERE, "Could not save config file", e); } } /** * Returns a properties object that contains all the configuration data. 
*/ private Properties getProperties() { Properties config = new Properties(); for (String name : groups.keySet()) { Map<PerCredentialOption, String> group = groups.get(name); for (Map.Entry<PerCredentialOption, String> option : group.entrySet()) { config.put(canonicalizeName(option.getKey(), name), option.getValue()); } } for (Map.Entry<FormGlobalOption, String> option : globals.entrySet()) { config.put(canonicalizeName(option.getKey()), option.getValue()); } return config; } /** * Parses a Properties object and populates the internal Maps with the data. * @return true if the config was parsed properly */ private boolean parsePropertiesConfig(Properties config) { - Enumeration e = config.propertyNames(); + Enumeration<?> e = config.propertyNames(); boolean success = true; while (e.hasMoreElements()) { - String item = (String) e.nextElement(); + String item = String.class.cast(e.nextElement()); if (isGlobalOption(item)) { globals.put(parseGlobalOption(item), config.getProperty(item)); } else if (isCredentialOption(item)) { String credentialGroup = getCredentialGroup(item); if (!groups.containsKey(credentialGroup)) { groups.put(credentialGroup, new HashMap<PerCredentialOption, String>()); } groups.get(credentialGroup).put(parseCredentialOption(item), config.getProperty(item)); } else { LOGGER.info("Could not parse option: " + item); success = false; } } return success; } private boolean isGlobalOption(String item) { return item.startsWith(getShortClassName(FormGlobalOption.PAGE_TITLE)); } private boolean isCredentialOption(String item) { return item.startsWith(getShortClassName(PerCredentialOption.INTRO_TEXT)); } private FormGlobalOption parseGlobalOption(String option) throws IllegalArgumentException { String[] parts = option.split(":"); if (parts.length < 2) { throw new IllegalArgumentException("Argument improperly formatted: " + option); } return Enum.valueOf(FormGlobalOption.class, parts[1]); } private PerCredentialOption parseCredentialOption(String option) 
throws IllegalArgumentException { String[] parts = option.split(":"); if (parts.length < 2) { throw new IllegalArgumentException("Argument improperly formatted: " + option); } return Enum.valueOf(PerCredentialOption.class, parts[1]); } private String canonicalizeName(Object obj) { return getShortClassName(obj) + ":" + obj.toString(); } private String canonicalizeName(Object obj, String name) { return getShortClassName(obj) + ":" + obj.toString() + ":" + name; } /** * For an obj of type FormGlobalOption, this function returns * "FormGlobalOption". This is used, rather than the full classname, * because corresponding code on the GSA may exist in a different package * hierarchy. */ private String getShortClassName(Object obj) { String fullName = obj.getClass().getName(); return fullName.substring(fullName.lastIndexOf("$") + 1); } /** * Preconditions: item really is an entry for a Credential option, * not global option. * */ private String getCredentialGroup(String item) throws IllegalArgumentException { String[] parts = item.split(":"); if (parts.length != 3) { LOGGER.severe("Problem with format"); throw new IllegalArgumentException("Argument did not have valid group: " + item); } return parts[2]; } @Override public String toString() { return getProperties().toString(); } @Override public boolean equals(Object obj) { if (!(obj instanceof BasicOmniFormCustomization)) { return false; } return toString().equals(obj.toString()); } }
false
true
private boolean parsePropertiesConfig(Properties config) { Enumeration e = config.propertyNames(); boolean success = true; while (e.hasMoreElements()) { String item = (String) e.nextElement(); if (isGlobalOption(item)) { globals.put(parseGlobalOption(item), config.getProperty(item)); } else if (isCredentialOption(item)) { String credentialGroup = getCredentialGroup(item); if (!groups.containsKey(credentialGroup)) { groups.put(credentialGroup, new HashMap<PerCredentialOption, String>()); } groups.get(credentialGroup).put(parseCredentialOption(item), config.getProperty(item)); } else { LOGGER.info("Could not parse option: " + item); success = false; } } return success; }
private boolean parsePropertiesConfig(Properties config) { Enumeration<?> e = config.propertyNames(); boolean success = true; while (e.hasMoreElements()) { String item = String.class.cast(e.nextElement()); if (isGlobalOption(item)) { globals.put(parseGlobalOption(item), config.getProperty(item)); } else if (isCredentialOption(item)) { String credentialGroup = getCredentialGroup(item); if (!groups.containsKey(credentialGroup)) { groups.put(credentialGroup, new HashMap<PerCredentialOption, String>()); } groups.get(credentialGroup).put(parseCredentialOption(item), config.getProperty(item)); } else { LOGGER.info("Could not parse option: " + item); success = false; } } return success; }
diff --git a/plugins/org.eclipse.tcf.cdt.ui/src/org/eclipse/tcf/internal/cdt/ui/StartupPreferencesInitializer.java b/plugins/org.eclipse.tcf.cdt.ui/src/org/eclipse/tcf/internal/cdt/ui/StartupPreferencesInitializer.java index 087c01860..f82862b4c 100644 --- a/plugins/org.eclipse.tcf.cdt.ui/src/org/eclipse/tcf/internal/cdt/ui/StartupPreferencesInitializer.java +++ b/plugins/org.eclipse.tcf.cdt.ui/src/org/eclipse/tcf/internal/cdt/ui/StartupPreferencesInitializer.java @@ -1,67 +1,66 @@ /******************************************************************************* * Copyright (c) 2012 Wind River Systems, Inc. and others. All rights reserved. * This program and the accompanying materials are made available under the terms * of the Eclipse Public License v1.0 which accompanies this distribution, and is * available at http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Wind River Systems - initial API and implementation *******************************************************************************/ package org.eclipse.tcf.internal.cdt.ui; import org.eclipse.core.runtime.preferences.AbstractPreferenceInitializer; import org.eclipse.debug.internal.ui.DebugUIPlugin; import org.eclipse.debug.internal.ui.IInternalDebugUIConstants; import org.eclipse.jface.preference.IPreferenceStore; import org.eclipse.ui.IStartup; import org.eclipse.ui.PlatformUI; /** * Set the default TCF CDT plugin preferences */ @SuppressWarnings("restriction") public class StartupPreferencesInitializer extends AbstractPreferenceInitializer implements IStartup { /* (non-Javadoc) * @see org.eclipse.ui.IStartup#earlyStartup() */ public void earlyStartup() { PlatformUI.getWorkbench().getDisplay().syncExec(new Runnable() { public void run() { initializeDefaultPreferences(); } }); } /* (non-Javadoc) * @see org.eclipse.core.runtime.preferences.AbstractPreferenceInitializer#initializeDefaultPreferences() */ @Override public void initializeDefaultPreferences() { // "TCF Remote Application" launch is 
hidden by default. // No longer supported or maintained. IPreferenceStore store = DebugUIPlugin.getDefault().getPreferenceStore(); if (store != null) { - store.setValue(IInternalDebugUIConstants.PREF_FILTER_LAUNCH_TYPES, true); + store.setDefault(IInternalDebugUIConstants.PREF_FILTER_LAUNCH_TYPES, true); boolean added = false; String typeId = "org.eclipse.tcf.cdt.launch.remoteApplicationLaunchType"; //$NON-NLS-1$ - String typeList = store.getString(IInternalDebugUIConstants.PREF_FILTER_TYPE_LIST); - if ("".equals(typeList)) typeList = store.getDefaultString(IInternalDebugUIConstants.PREF_FILTER_TYPE_LIST); + String typeList = store.getDefaultString(IInternalDebugUIConstants.PREF_FILTER_TYPE_LIST); if ("".equals(typeList)) { typeList = typeId; added = true; } else if (!typeList.contains(typeId)) { typeList = typeList + "," + typeId; //$NON-NLS-1$ added = true; } if (added) { - store.putValue(IInternalDebugUIConstants.PREF_FILTER_TYPE_LIST, typeList); + store.setDefault(IInternalDebugUIConstants.PREF_FILTER_TYPE_LIST, typeList); } } } }
false
true
public void initializeDefaultPreferences() { // "TCF Remote Application" launch is hidden by default. // No longer supported or maintained. IPreferenceStore store = DebugUIPlugin.getDefault().getPreferenceStore(); if (store != null) { store.setValue(IInternalDebugUIConstants.PREF_FILTER_LAUNCH_TYPES, true); boolean added = false; String typeId = "org.eclipse.tcf.cdt.launch.remoteApplicationLaunchType"; //$NON-NLS-1$ String typeList = store.getString(IInternalDebugUIConstants.PREF_FILTER_TYPE_LIST); if ("".equals(typeList)) typeList = store.getDefaultString(IInternalDebugUIConstants.PREF_FILTER_TYPE_LIST); if ("".equals(typeList)) { typeList = typeId; added = true; } else if (!typeList.contains(typeId)) { typeList = typeList + "," + typeId; //$NON-NLS-1$ added = true; } if (added) { store.putValue(IInternalDebugUIConstants.PREF_FILTER_TYPE_LIST, typeList); } } }
public void initializeDefaultPreferences() { // "TCF Remote Application" launch is hidden by default. // No longer supported or maintained. IPreferenceStore store = DebugUIPlugin.getDefault().getPreferenceStore(); if (store != null) { store.setDefault(IInternalDebugUIConstants.PREF_FILTER_LAUNCH_TYPES, true); boolean added = false; String typeId = "org.eclipse.tcf.cdt.launch.remoteApplicationLaunchType"; //$NON-NLS-1$ String typeList = store.getDefaultString(IInternalDebugUIConstants.PREF_FILTER_TYPE_LIST); if ("".equals(typeList)) { typeList = typeId; added = true; } else if (!typeList.contains(typeId)) { typeList = typeList + "," + typeId; //$NON-NLS-1$ added = true; } if (added) { store.setDefault(IInternalDebugUIConstants.PREF_FILTER_TYPE_LIST, typeList); } } }
diff --git a/src/me/corriekay/pppopp3/remotechest/RemoteChest.java b/src/me/corriekay/pppopp3/remotechest/RemoteChest.java index d3f1cfc..be2d30e 100644 --- a/src/me/corriekay/pppopp3/remotechest/RemoteChest.java +++ b/src/me/corriekay/pppopp3/remotechest/RemoteChest.java @@ -1,244 +1,243 @@ package me.corriekay.pppopp3.remotechest; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import mc.alk.arena.util.ExpUtil; import me.corriekay.pppopp3.modules.Equestria; import me.corriekay.pppopp3.ponyville.Pony; import me.corriekay.pppopp3.ponyville.Ponyville; import me.corriekay.pppopp3.utils.PSCmdExe; import me.corriekay.pppopp3.utils.Utils; import org.bukkit.Bukkit; import org.bukkit.Material; import org.bukkit.World; import org.bukkit.command.Command; import org.bukkit.command.CommandSender; import org.bukkit.entity.Player; import org.bukkit.event.EventHandler; import org.bukkit.event.entity.EntityDamageByEntityEvent; import org.bukkit.event.entity.PlayerDeathEvent; import org.bukkit.event.inventory.InventoryClickEvent; import org.bukkit.event.inventory.InventoryCloseEvent; import org.bukkit.inventory.Inventory; import org.bukkit.inventory.ItemStack; import org.bukkit.inventory.PlayerInventory; import org.bukkit.inventory.meta.SkullMeta; public class RemoteChest extends PSCmdExe{ private HashMap<String,Inventory> viewingInvs = new HashMap<String,Inventory>(); public RemoteChest(){ super("RemoteChest", "c", "w", "transferchest"); } public boolean handleCommand(CommandSender sender, Command cmd, String label, String[] args){ String cmdn = cmd.getName(); if(!(sender instanceof Player)) { sendMessage(sender, notPlayer); return true; } Player player = (Player)sender; if(cmdn.equals("c")) { Pony pony = Ponyville.getPony(player); Inventory inv = pony.getRemoteChest(Equestria.get().getParentWorld(player.getWorld())); if(inv == null) { sendMessage(player, "There is no remote chest for this world!"); return true; } 
player.openInventory(inv); viewingInvs.put(player.getName(), inv); return true; } if(cmdn.equals("w")) { player.openWorkbench(null, true); return true; } if(cmd.getName().equals("transferchest")) { World world = Equestria.get().getParentWorld(player.getWorld()); Pony pony = Ponyville.getPony(player); Inventory inv = pony.getRemoteChest(world); if(inv == null) { sendMessage(player, "There is no remote chest for this world!"); return true; } if(args.length == 0) { transferChest(player, player.getInventory(), inv, "null", false); sendMessage(player, "Items transferred!"); return true; } if(args[0].equals("help")) { sendMessage(player, "transferchest is a powerful item transferral tool for the on-the-go miner! if you just want to dump your inventory into your chest, just type /transferchest. Simple as that! If you want to use a \"smart\" mode, type either /transferchest ore (to transfer ores and ingots) or /transferchest material (to transfer materials, such as dirt, gravel, sand, cobblestone) to your chest. If youre feeling adventurous, type /transferchest <material type (or) material id> to transfer a specific type of item to your inventory!"); return true; } if(args[0].equals("ore") || args[0].equals("material")) { transferChest(player, player.getInventory(), inv, args[0], false); sendMessage(player, "Items Transferred!"); return true; } boolean correct = true; Material mat = null; int matId; - try { - mat = Material.matchMaterial(args[0]); - correct = true; - } catch(IllegalArgumentException e) { + mat = Material.matchMaterial(args[0]); + correct = true; + if(mat == null) { try { matId = Integer.parseInt(args[0]); mat = Material.getMaterial(matId); correct = true; } catch(NumberFormatException e2) { correct = false; } } if(correct) { transferChest(player, player.getInventory(), inv, mat.name(), true); sendMessage(player, "Items transferred!"); return true; } else { sendMessage(player, "Sorry... 
I couldnt find that item type!"); return true; } } return true; } @EventHandler public void invclick(InventoryClickEvent event){ if(event.getWhoClicked() instanceof Player) { Player player = (Player)event.getWhoClicked(); if(viewingInvs.containsKey(player.getName())) { Pony pony = Ponyville.getPony(player); String invWorldname = pony.getRCWorld(viewingInvs.get(player.getName())).getName(); String actualWorldName = player.getWorld().getName(); if(invWorldname != actualWorldName) { event.setCancelled(true); player.closeInventory(); viewingInvs.remove(player.getName()); return; } } } } @EventHandler public void invclose(InventoryCloseEvent event){ if(event.getPlayer() instanceof Player) { Player p = (Player)event.getPlayer(); if(viewingInvs.containsKey(p.getName())) { Inventory inv2 = viewingInvs.get(p.getName()); viewingInvs.remove(p.getName()); Pony pony = Ponyville.getPony(p); pony.saveRemoteChest(pony.getRCWorld(inv2)); pony.save(); System.out.println("saving inventory"); return; } } } protected static void transferChest(Player player, PlayerInventory playerInv, Inventory chestInv, String param, boolean isMat){ HashSet<Material> typesToTransfer = new HashSet<Material>(); if(isMat) { Material mat = Material.getMaterial(param); typesToTransfer.add(mat); } else { if(param.equals("ore")) { typesToTransfer.add(Material.COAL); typesToTransfer.add(Material.COAL_ORE); typesToTransfer.add(Material.DIAMOND); typesToTransfer.add(Material.DIAMOND_ORE); typesToTransfer.add(Material.DIAMOND_BLOCK); typesToTransfer.add(Material.IRON_INGOT); typesToTransfer.add(Material.IRON_ORE); typesToTransfer.add(Material.IRON_BLOCK); typesToTransfer.add(Material.GOLD_INGOT); typesToTransfer.add(Material.GOLD_ORE); typesToTransfer.add(Material.GOLD_BLOCK); typesToTransfer.add(Material.LAPIS_ORE); typesToTransfer.add(Material.LAPIS_BLOCK); typesToTransfer.add(Material.REDSTONE_ORE); typesToTransfer.add(Material.REDSTONE); } if(param.equals("material")) { typesToTransfer.add(Material.DIRT); 
typesToTransfer.add(Material.STONE); typesToTransfer.add(Material.COBBLESTONE); typesToTransfer.add(Material.SAND); typesToTransfer.add(Material.GRAVEL); typesToTransfer.add(Material.CLAY_BALL); typesToTransfer.add(Material.WOOD); typesToTransfer.add(Material.LOG); typesToTransfer.add(Material.SNOW_BALL); typesToTransfer.add(Material.SNOW_BLOCK); } if(param.equals("null")) {//dumb transfer/alltransfer for(Material material : Material.values()) { typesToTransfer.add(material); } } } for(int i = 9; i <= 35; i++) { ItemStack playerIs = playerInv.getContents()[i]; if(playerIs == null) { playerIs = new ItemStack(Material.AIR); } if(typesToTransfer.contains(playerIs.getType())) { try { HashMap<Integer,ItemStack> returnedItems = chestInv.addItem(playerInv.getItem(i)); playerInv.setItem(i, returnedItems.get(0)); } catch(NullPointerException e) {} } } } @EventHandler public void onDeath(PlayerDeathEvent event){ if(Equestria.get().getParentWorld(event.getEntity().getWorld()).getName().equals("badlands")) { event.setKeepLevel(false); if(!(event.getEntity().getLastDamageCause() instanceof EntityDamageByEntityEvent)) { event.setDroppedExp(0); event.setNewTotalExp(0); return; } else { Player player = event.getEntity(); EntityDamageByEntityEvent edbee = (EntityDamageByEntityEvent)player.getLastDamageCause(); if(!(edbee.getDamager() instanceof Player)) { event.setDroppedExp(0); event.setNewTotalExp(0); return; } Player killer = (Player)edbee.getDamager(); Pony pony = Ponyville.getPony(player); Inventory inv = pony.getRemoteChest(Bukkit.getWorld("badlands")); for(ItemStack is : inv.getContents()) { event.getDrops().add(is); } inv.clear(); pony.saveRemoteChest(Bukkit.getWorld("badlands")); pony.save(); ExpUtil.giveExperience(killer, event.getDroppedExp() + event.getNewExp()); event.setDroppedExp(0); event.setNewTotalExp(0); ItemStack is = new ItemStack(Material.SKULL_ITEM, 1, (byte)3); SkullMeta sm = (SkullMeta)is.getItemMeta(); sm.setOwner(player.getName()); List<String> lore = new 
ArrayList<String>(); String weapon = "fists"; if(killer.getItemInHand() != null) { weapon = killer.getItemInHand().getType().name(); } lore.add("Killed by " + killer.getName() + " with " + weapon + "!"); lore.add("Time of death: " + Utils.getDate(System.currentTimeMillis())); sm.setLore(lore); is.setItemMeta(sm); event.getDrops().add(is); } } } }
true
true
public boolean handleCommand(CommandSender sender, Command cmd, String label, String[] args){ String cmdn = cmd.getName(); if(!(sender instanceof Player)) { sendMessage(sender, notPlayer); return true; } Player player = (Player)sender; if(cmdn.equals("c")) { Pony pony = Ponyville.getPony(player); Inventory inv = pony.getRemoteChest(Equestria.get().getParentWorld(player.getWorld())); if(inv == null) { sendMessage(player, "There is no remote chest for this world!"); return true; } player.openInventory(inv); viewingInvs.put(player.getName(), inv); return true; } if(cmdn.equals("w")) { player.openWorkbench(null, true); return true; } if(cmd.getName().equals("transferchest")) { World world = Equestria.get().getParentWorld(player.getWorld()); Pony pony = Ponyville.getPony(player); Inventory inv = pony.getRemoteChest(world); if(inv == null) { sendMessage(player, "There is no remote chest for this world!"); return true; } if(args.length == 0) { transferChest(player, player.getInventory(), inv, "null", false); sendMessage(player, "Items transferred!"); return true; } if(args[0].equals("help")) { sendMessage(player, "transferchest is a powerful item transferral tool for the on-the-go miner! if you just want to dump your inventory into your chest, just type /transferchest. Simple as that! If you want to use a \"smart\" mode, type either /transferchest ore (to transfer ores and ingots) or /transferchest material (to transfer materials, such as dirt, gravel, sand, cobblestone) to your chest. 
If youre feeling adventurous, type /transferchest <material type (or) material id> to transfer a specific type of item to your inventory!"); return true; } if(args[0].equals("ore") || args[0].equals("material")) { transferChest(player, player.getInventory(), inv, args[0], false); sendMessage(player, "Items Transferred!"); return true; } boolean correct = true; Material mat = null; int matId; try { mat = Material.matchMaterial(args[0]); correct = true; } catch(IllegalArgumentException e) { try { matId = Integer.parseInt(args[0]); mat = Material.getMaterial(matId); correct = true; } catch(NumberFormatException e2) { correct = false; } } if(correct) { transferChest(player, player.getInventory(), inv, mat.name(), true); sendMessage(player, "Items transferred!"); return true; } else { sendMessage(player, "Sorry... I couldnt find that item type!"); return true; } } return true; }
public boolean handleCommand(CommandSender sender, Command cmd, String label, String[] args){ String cmdn = cmd.getName(); if(!(sender instanceof Player)) { sendMessage(sender, notPlayer); return true; } Player player = (Player)sender; if(cmdn.equals("c")) { Pony pony = Ponyville.getPony(player); Inventory inv = pony.getRemoteChest(Equestria.get().getParentWorld(player.getWorld())); if(inv == null) { sendMessage(player, "There is no remote chest for this world!"); return true; } player.openInventory(inv); viewingInvs.put(player.getName(), inv); return true; } if(cmdn.equals("w")) { player.openWorkbench(null, true); return true; } if(cmd.getName().equals("transferchest")) { World world = Equestria.get().getParentWorld(player.getWorld()); Pony pony = Ponyville.getPony(player); Inventory inv = pony.getRemoteChest(world); if(inv == null) { sendMessage(player, "There is no remote chest for this world!"); return true; } if(args.length == 0) { transferChest(player, player.getInventory(), inv, "null", false); sendMessage(player, "Items transferred!"); return true; } if(args[0].equals("help")) { sendMessage(player, "transferchest is a powerful item transferral tool for the on-the-go miner! if you just want to dump your inventory into your chest, just type /transferchest. Simple as that! If you want to use a \"smart\" mode, type either /transferchest ore (to transfer ores and ingots) or /transferchest material (to transfer materials, such as dirt, gravel, sand, cobblestone) to your chest. 
If youre feeling adventurous, type /transferchest <material type (or) material id> to transfer a specific type of item to your inventory!"); return true; } if(args[0].equals("ore") || args[0].equals("material")) { transferChest(player, player.getInventory(), inv, args[0], false); sendMessage(player, "Items Transferred!"); return true; } boolean correct = true; Material mat = null; int matId; mat = Material.matchMaterial(args[0]); correct = true; if(mat == null) { try { matId = Integer.parseInt(args[0]); mat = Material.getMaterial(matId); correct = true; } catch(NumberFormatException e2) { correct = false; } } if(correct) { transferChest(player, player.getInventory(), inv, mat.name(), true); sendMessage(player, "Items transferred!"); return true; } else { sendMessage(player, "Sorry... I couldnt find that item type!"); return true; } } return true; }
diff --git a/ps3mediaserver/net/pms/network/RequestHandler.java b/ps3mediaserver/net/pms/network/RequestHandler.java index 3667cb00..e2a129c8 100644 --- a/ps3mediaserver/net/pms/network/RequestHandler.java +++ b/ps3mediaserver/net/pms/network/RequestHandler.java @@ -1,157 +1,157 @@ /* * PS3 Media Server, for streaming any medias to your PS3. * Copyright (C) 2008 A.Brochard * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; version 2 * of the License only. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
*/ package net.pms.network; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStream; import java.net.Socket; import java.util.StringTokenizer; import net.pms.PMS; import net.pms.dlna.DLNAMediaInfo; public class RequestHandler implements Runnable { public final static int SOCKET_BUF_SIZE = 32768; private Socket socket; private OutputStream output; private BufferedReader br; public RequestHandler(Socket socket) throws IOException { this.socket = socket; if (PMS.get().isTurbomode()) { try { //socket.setSendBufferSize(SOCKET_BUF_SIZE); } catch(Exception e) { PMS.error(null, e); } try { socket.setTcpNoDelay(true); } catch(Exception e) { PMS.error(null, e); } } this.output = socket.getOutputStream(); this.br = new BufferedReader(new InputStreamReader(socket.getInputStream())); } public void run() { Request request = null; try { PMS.debug("Opened handler on socket " + socket); int receivedContentLength = -1; String headerLine = br.readLine(); while (headerLine != null && headerLine.length() > 0) { PMS.debug( "Received on socket: " + headerLine); if (headerLine != null && headerLine.indexOf("PLAYSTATION") >-1) PMS.get().setPs3found(true); else if (headerLine != null && headerLine.indexOf("Xbox") >-1) PMS.get().setXboxfound(true); try { StringTokenizer s = new StringTokenizer(headerLine); String temp = s.nextToken(); if (temp.equals("GET") || temp.equals("POST") || temp.equals("HEAD")) { request = new Request(temp, s.nextToken().substring(1)); if (s.hasMoreTokens() && s.nextToken().equals("HTTP/1.0")) request.setHttp10(true); } else if (request != null && temp.equals("SOAPACTION:")) { request.setSoapaction(s.nextToken()); } else if (headerLine.toUpperCase().contains("CONTENT-LENGTH:")) { receivedContentLength = Integer.parseInt(headerLine.substring(headerLine.toUpperCase().indexOf("CONTENT-LENGTH: ")+16)); } else if (headerLine.indexOf("Range: bytes=") > -1) { String nums = 
headerLine.substring(headerLine.indexOf("Range: bytes=")+13).trim(); StringTokenizer st = new StringTokenizer(nums, "-"); if (!nums.startsWith("-")) request.setLowRange(Long.parseLong(st.nextToken())); if (!nums.startsWith("-") && !nums.endsWith("-")) request.setHighRange(Long.parseLong(st.nextToken())); else request.setHighRange(DLNAMediaInfo.TRANS_SIZE); } else if (headerLine.indexOf("transferMode.dlna.org:") > -1) { request.setTransferMode(headerLine); } else if (headerLine.indexOf("TimeSeekRange.dlna.org: npt=") > -1) { // firmware 2.50+ String timeseek = headerLine.substring(headerLine.indexOf("TimeSeekRange.dlna.org: npt=")+28); if (timeseek.endsWith("-")) timeseek = timeseek.substring(0, timeseek.length()-1); request.setTimeseek(Double.parseDouble(timeseek)); } else if (headerLine.indexOf("TimeSeekRange.dlna.org : npt=") > -1) { // firmware 2.40 String timeseek = headerLine.substring(headerLine.indexOf("TimeSeekRange.dlna.org : npt=")+29); if (timeseek.endsWith("-")) timeseek = timeseek.substring(0, timeseek.length()-1); request.setTimeseek(Double.parseDouble(timeseek)); } } catch (Exception e) { PMS.error("Error in parsing HTTP headers", e); } headerLine = br.readLine(); } - if (receivedContentLength > -1) { + if (receivedContentLength > 0) { char buf [] = new char [receivedContentLength-1]; br.read(buf); if (request != null) request.setTextContent(new String(buf)); } if (request != null) PMS.info( "HTTP: " + request.getArgument() + " / " + request.getLowRange() + "-" + request.getHighRange()); if (request != null) request.answer(output); if (request != null && request.getInputStream() != null) request.getInputStream().close(); } catch (IOException e) { PMS.debug("Unexpected IO Error: " + e.getClass() + ": " + e.getMessage()); if (request != null && request.getInputStream() != null) { try { PMS.debug( "Close InputStream" + request.getInputStream()); request.getInputStream().close(); } catch (IOException e1) { PMS.error("Close InputStream Error", e); } } } 
finally { try { output.close(); br.close(); socket.close(); } catch (IOException e) { PMS.error("Close Connection Error", e); } PMS.debug("Close Connection"); } } }
true
true
public void run() { Request request = null; try { PMS.debug("Opened handler on socket " + socket); int receivedContentLength = -1; String headerLine = br.readLine(); while (headerLine != null && headerLine.length() > 0) { PMS.debug( "Received on socket: " + headerLine); if (headerLine != null && headerLine.indexOf("PLAYSTATION") >-1) PMS.get().setPs3found(true); else if (headerLine != null && headerLine.indexOf("Xbox") >-1) PMS.get().setXboxfound(true); try { StringTokenizer s = new StringTokenizer(headerLine); String temp = s.nextToken(); if (temp.equals("GET") || temp.equals("POST") || temp.equals("HEAD")) { request = new Request(temp, s.nextToken().substring(1)); if (s.hasMoreTokens() && s.nextToken().equals("HTTP/1.0")) request.setHttp10(true); } else if (request != null && temp.equals("SOAPACTION:")) { request.setSoapaction(s.nextToken()); } else if (headerLine.toUpperCase().contains("CONTENT-LENGTH:")) { receivedContentLength = Integer.parseInt(headerLine.substring(headerLine.toUpperCase().indexOf("CONTENT-LENGTH: ")+16)); } else if (headerLine.indexOf("Range: bytes=") > -1) { String nums = headerLine.substring(headerLine.indexOf("Range: bytes=")+13).trim(); StringTokenizer st = new StringTokenizer(nums, "-"); if (!nums.startsWith("-")) request.setLowRange(Long.parseLong(st.nextToken())); if (!nums.startsWith("-") && !nums.endsWith("-")) request.setHighRange(Long.parseLong(st.nextToken())); else request.setHighRange(DLNAMediaInfo.TRANS_SIZE); } else if (headerLine.indexOf("transferMode.dlna.org:") > -1) { request.setTransferMode(headerLine); } else if (headerLine.indexOf("TimeSeekRange.dlna.org: npt=") > -1) { // firmware 2.50+ String timeseek = headerLine.substring(headerLine.indexOf("TimeSeekRange.dlna.org: npt=")+28); if (timeseek.endsWith("-")) timeseek = timeseek.substring(0, timeseek.length()-1); request.setTimeseek(Double.parseDouble(timeseek)); } else if (headerLine.indexOf("TimeSeekRange.dlna.org : npt=") > -1) { // firmware 2.40 String timeseek = 
headerLine.substring(headerLine.indexOf("TimeSeekRange.dlna.org : npt=")+29); if (timeseek.endsWith("-")) timeseek = timeseek.substring(0, timeseek.length()-1); request.setTimeseek(Double.parseDouble(timeseek)); } } catch (Exception e) { PMS.error("Error in parsing HTTP headers", e); } headerLine = br.readLine(); } if (receivedContentLength > -1) { char buf [] = new char [receivedContentLength-1]; br.read(buf); if (request != null) request.setTextContent(new String(buf)); } if (request != null) PMS.info( "HTTP: " + request.getArgument() + " / " + request.getLowRange() + "-" + request.getHighRange()); if (request != null) request.answer(output); if (request != null && request.getInputStream() != null) request.getInputStream().close(); } catch (IOException e) { PMS.debug("Unexpected IO Error: " + e.getClass() + ": " + e.getMessage()); if (request != null && request.getInputStream() != null) { try { PMS.debug( "Close InputStream" + request.getInputStream()); request.getInputStream().close(); } catch (IOException e1) { PMS.error("Close InputStream Error", e); } } } finally { try { output.close(); br.close(); socket.close(); } catch (IOException e) { PMS.error("Close Connection Error", e); } PMS.debug("Close Connection"); } }
public void run() { Request request = null; try { PMS.debug("Opened handler on socket " + socket); int receivedContentLength = -1; String headerLine = br.readLine(); while (headerLine != null && headerLine.length() > 0) { PMS.debug( "Received on socket: " + headerLine); if (headerLine != null && headerLine.indexOf("PLAYSTATION") >-1) PMS.get().setPs3found(true); else if (headerLine != null && headerLine.indexOf("Xbox") >-1) PMS.get().setXboxfound(true); try { StringTokenizer s = new StringTokenizer(headerLine); String temp = s.nextToken(); if (temp.equals("GET") || temp.equals("POST") || temp.equals("HEAD")) { request = new Request(temp, s.nextToken().substring(1)); if (s.hasMoreTokens() && s.nextToken().equals("HTTP/1.0")) request.setHttp10(true); } else if (request != null && temp.equals("SOAPACTION:")) { request.setSoapaction(s.nextToken()); } else if (headerLine.toUpperCase().contains("CONTENT-LENGTH:")) { receivedContentLength = Integer.parseInt(headerLine.substring(headerLine.toUpperCase().indexOf("CONTENT-LENGTH: ")+16)); } else if (headerLine.indexOf("Range: bytes=") > -1) { String nums = headerLine.substring(headerLine.indexOf("Range: bytes=")+13).trim(); StringTokenizer st = new StringTokenizer(nums, "-"); if (!nums.startsWith("-")) request.setLowRange(Long.parseLong(st.nextToken())); if (!nums.startsWith("-") && !nums.endsWith("-")) request.setHighRange(Long.parseLong(st.nextToken())); else request.setHighRange(DLNAMediaInfo.TRANS_SIZE); } else if (headerLine.indexOf("transferMode.dlna.org:") > -1) { request.setTransferMode(headerLine); } else if (headerLine.indexOf("TimeSeekRange.dlna.org: npt=") > -1) { // firmware 2.50+ String timeseek = headerLine.substring(headerLine.indexOf("TimeSeekRange.dlna.org: npt=")+28); if (timeseek.endsWith("-")) timeseek = timeseek.substring(0, timeseek.length()-1); request.setTimeseek(Double.parseDouble(timeseek)); } else if (headerLine.indexOf("TimeSeekRange.dlna.org : npt=") > -1) { // firmware 2.40 String timeseek = 
headerLine.substring(headerLine.indexOf("TimeSeekRange.dlna.org : npt=")+29); if (timeseek.endsWith("-")) timeseek = timeseek.substring(0, timeseek.length()-1); request.setTimeseek(Double.parseDouble(timeseek)); } } catch (Exception e) { PMS.error("Error in parsing HTTP headers", e); } headerLine = br.readLine(); } if (receivedContentLength > 0) { char buf [] = new char [receivedContentLength-1]; br.read(buf); if (request != null) request.setTextContent(new String(buf)); } if (request != null) PMS.info( "HTTP: " + request.getArgument() + " / " + request.getLowRange() + "-" + request.getHighRange()); if (request != null) request.answer(output); if (request != null && request.getInputStream() != null) request.getInputStream().close(); } catch (IOException e) { PMS.debug("Unexpected IO Error: " + e.getClass() + ": " + e.getMessage()); if (request != null && request.getInputStream() != null) { try { PMS.debug( "Close InputStream" + request.getInputStream()); request.getInputStream().close(); } catch (IOException e1) { PMS.error("Close InputStream Error", e); } } } finally { try { output.close(); br.close(); socket.close(); } catch (IOException e) { PMS.error("Close Connection Error", e); } PMS.debug("Close Connection"); } }
diff --git a/backend/grisu-core/src/main/java/org/vpac/grisu/js/control/job/gt4/GT4Submitter.java b/backend/grisu-core/src/main/java/org/vpac/grisu/js/control/job/gt4/GT4Submitter.java index df2d0ba..113ef16 100644 --- a/backend/grisu-core/src/main/java/org/vpac/grisu/js/control/job/gt4/GT4Submitter.java +++ b/backend/grisu-core/src/main/java/org/vpac/grisu/js/control/job/gt4/GT4Submitter.java @@ -1,693 +1,693 @@ package org.vpac.grisu.js.control.job.gt4; import java.io.BufferedWriter; import java.io.FileWriter; import java.io.StringWriter; import java.net.URL; import java.util.Date; import java.util.Map; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.OutputKeys; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.TransformerFactoryConfigurationError; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import org.apache.axis.message.addressing.EndpointReferenceType; import org.apache.log4j.Logger; import org.globus.exec.client.GramJob; import org.globus.exec.generated.JobDescriptionType; import org.globus.exec.utils.client.ManagedJobFactoryClientHelper; import org.globus.exec.utils.rsl.RSLHelper; import org.globus.exec.utils.rsl.RSLParseException; import org.globus.wsrf.impl.security.authentication.Constants; import org.globus.wsrf.impl.security.authorization.Authorization; import org.globus.wsrf.impl.security.authorization.HostAuthorization; import org.ietf.jgss.GSSCredential; import org.vpac.grisu.control.JobConstants; import org.vpac.grisu.control.ServiceInterface; import org.vpac.grisu.control.SeveralXMLHelpers; import org.vpac.grisu.control.exceptions.NoValidCredentialException; import 
org.vpac.grisu.control.exceptions.ServerJobSubmissionException; import org.vpac.grisu.control.info.CachedMdsInformationManager; import org.vpac.grisu.control.info.InformationManager; import org.vpac.grisu.control.utils.DebugUtils; import org.vpac.grisu.control.utils.ServerPropertiesManager; import org.vpac.grisu.credential.model.ProxyCredential; import org.vpac.grisu.js.control.job.JobSubmitter; import org.vpac.grisu.js.model.Job; import org.vpac.grisu.js.model.utils.JsdlHelpers; import org.vpac.security.light.CredentialHelpers; import org.w3c.dom.Document; import org.w3c.dom.Element; /** * This class is the connector class between grisu and our GT4 gateways. It * translates the jsdl document into the rsl format and also knows how to submit * a job to a GT4 endpoint using WS-GRAM. * * @author Markus Binsteiner * */ public class GT4Submitter extends JobSubmitter { static final Logger myLogger = Logger.getLogger(GT4Submitter.class .getName()); protected InformationManager informationManager = CachedMdsInformationManager .getDefaultCachedMdsInformationManager(); /* * (non-Javadoc) * * @see * org.vpac.grisu.js.control.job.JobSubmitter#createJobSubmissionDescription * (org.w3c.dom.Document) */ private String createJobSubmissionDescription( ServiceInterface serviceInterface, Document jsdl) throws ServerJobSubmissionException { DebugUtils.jsdlDebugOutput("Before translating into rsl: ", jsdl); Document output = null; try { DocumentBuilderFactory docFactory = DocumentBuilderFactory .newInstance(); DocumentBuilder docBuilder = docFactory.newDocumentBuilder(); output = docBuilder.newDocument(); } catch (ParserConfigurationException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } // Add root element Element job = output.createElement("job"); output.appendChild(job); // Add "executable" node Element executable = output.createElement("executable"); executable.setTextContent(JsdlHelpers.getPosixApplication(jsdl)); job.appendChild(executable); // Add "argument"s 
String[] arguments = JsdlHelpers.getPosixApplicationArguments(jsdl); for (String argument : arguments) { if (argument != null && !"".equals(argument.trim())) { Element argument_node = output.createElement("argument"); argument_node.setTextContent(argument); job.appendChild(argument_node); } } // Add "directory" Element directory = output.createElement("directory"); directory.setTextContent(JsdlHelpers.getWorkingDirectory(jsdl)); job.appendChild(directory); // "stdin" element if available String stdinValue = JsdlHelpers.getPosixStandardInput(jsdl); if (stdinValue != null && !"".equals(stdinValue)) { Element stdin = output.createElement("stdin"); stdin.setTextContent(stdinValue); job.appendChild(stdin); } // Add "stdout" Element stdout = output.createElement("stdout"); stdout.setTextContent(JsdlHelpers.getPosixStandardOutput(jsdl)); job.appendChild(stdout); // Add "stderr" Element stderr = output.createElement("stderr"); stderr.setTextContent(JsdlHelpers.getPosixStandardError(jsdl)); job.appendChild(stderr); // Add "queue" node // TODO change that once I know how to specify queues in jsdl String queue = JsdlHelpers.getCandidateHosts(jsdl)[0]; // TODO this // always uses // the first // candidate // host - not // good if (queue.indexOf(":") != -1) { queue = queue.substring(0, queue.indexOf(":")); Element queue_node = output.createElement("queue"); queue_node.setTextContent(queue); job.appendChild(queue_node); } // Add "jobtype" if mpi int processorCount = JsdlHelpers.getProcessorCount(jsdl); Element jobType = output.createElement("jobType"); String jobTypeString = JsdlHelpers.getJobType(jsdl); if (processorCount > 1) { Element count = output.createElement("count"); count.setTextContent(new Integer(processorCount).toString()); job.appendChild(count); if (jobTypeString == null) { jobType.setTextContent("mpi"); } else { jobType.setTextContent(jobTypeString); } } else { if (jobTypeString == null) { jobType.setTextContent("single"); } else { 
jobType.setTextContent(jobTypeString); } } job.appendChild(jobType); // total memory Long memory = JsdlHelpers.getTotalMemoryRequirement(jsdl); if (memory != null && memory >= 0) { Element totalMemory = output.createElement("maxMemory"); // convert from bytes to mb memory = memory / 1024; totalMemory.setTextContent(memory.toString()); job.appendChild(totalMemory); } // Add "maxWallTime" node int walltime = JsdlHelpers.getWalltime(jsdl); if (walltime > 0) { Element maxWallTime = output.createElement("maxWallTime"); int wt = new Integer(JsdlHelpers.getWalltime(jsdl)); // convert to minutes wt = wt / 60; maxWallTime.setTextContent(new Integer(wt).toString()); job.appendChild(maxWallTime); } Element fileStageIn = output.createElement("fileStageIn"); // stage ins // Map<String, String> stageIns = JsdlHelpers.getStageIns(jsdl); // // only append stageIns element if not 0 because globus will reject // the job // if there is an empyt <stageIns> tag // if ( stageIns.size() > 0 ) { // for ( String source : stageIns.keySet() ) { // Element stageIn = output.createElement("transfer"); // Element sourceURL = output.createElement("sourceUrl"); // sourceURL.setTextContent(source); // stageIn.appendChild(sourceURL); // Element targetURL = output.createElement("destinationUrl"); // targetURL.setTextContent(stageIns.get(source)); // stageIn.appendChild(targetURL); // // fileStageIn.appendChild(stageIn); // } // job.appendChild(fileStageIn); // } // Extensions Element extensions = output.createElement("extensions"); // jobname Element jobname = output.createElement("jobname"); String jobname_string = JsdlHelpers.getJobname(jsdl); // because of some pbs restrictions we have to keep the jobname to 6 // chars if (jobname_string.length() > 6) { jobname.setTextContent(jobname_string.substring(jobname_string .length() - 6)); } // jobname.setTextContent(jobname_string); extensions.appendChild(jobname); // module -- old style String[] modules_string = null; try { modules_string = 
JsdlHelpers.getModules(jsdl); } catch (Exception e) { // doesn't matter } - if (modules_string != null && modules_string.length == 0) { + if (modules_string != null && modules_string.length > 0) { for (String module_string : modules_string) { if (!"".equals(module_string)) { Element module = output.createElement("module"); module.setTextContent(module_string); extensions.appendChild(module); } } } else { // try to determine module to load from mds -- this will be the // default way of doing it later on and the module element will // disappear // it was stupid in the first place to have it... String application = JsdlHelpers.getApplicationName(jsdl); String version = JsdlHelpers.getApplicationVersion(jsdl); String subLoc = JsdlHelpers.getCandidateHosts(jsdl)[0]; if (application != null || version != null || subLoc != null) { Map<String, String> appDetails = serviceInterface .getApplicationDetails(application, version, subLoc); String modulesString = appDetails .get(JobConstants.MDS_MODULES_KEY); if ( modules_string == null || "".equals(modules_string) ) { myLogger.warn("No module for this application/version/submissionLocation found. 
Submitting nonetheless..."); } if (modulesString != null && modulesString.length() > 0) { modules_string = appDetails.get( JobConstants.MDS_MODULES_KEY).split(","); for (String module_string : modules_string) { if (!"".equals(module_string)) { Element module = output.createElement("module"); module.setTextContent(module_string); extensions.appendChild(module); } } } } else { throw new ServerJobSubmissionException("Can't determine either application, version or submissionLocation."); } } // email String email = JsdlHelpers.getEmail(jsdl); if (email != null && !"".equals(email)) { Element email_address = output.createElement("email_address"); email_address.setTextContent(email); extensions.appendChild(email_address); if (JsdlHelpers.sendEmailOnJobStart(jsdl)) { Element emailonexecution = output .createElement("emailonexecution"); emailonexecution.setTextContent("yes"); extensions.appendChild(emailonexecution); } if (JsdlHelpers.sendEmailOnJobFinish(jsdl)) { Element emailonabort = output.createElement("emailonabort"); emailonabort.setTextContent("yes"); Element emailontermination = output .createElement("emailontermination"); emailontermination.setTextContent("yes"); extensions.appendChild(emailonabort); extensions.appendChild(emailontermination); } } job.appendChild(extensions); // initialize StreamResult with InputFile object to save to file StreamResult result = null; try { Transformer transformer = TransformerFactory.newInstance() .newTransformer(); transformer.setOutputProperty(OutputKeys.INDENT, "yes"); result = new StreamResult(new StringWriter()); DOMSource source = new DOMSource(output); transformer.transform(source, result); } catch (TransformerConfigurationException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (IllegalArgumentException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (TransformerFactoryConfigurationError e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (TransformerException e) 
{ // TODO Auto-generated catch block e.printStackTrace(); } return result.getWriter().toString(); } // // this method is just for testing. Do not use!!! // protected String submit(String host, String factoryType, Document jsdl, // GSSCredential credential) { // // JobDescriptionType jobDesc = null; // String submittedJobDesc = null; // try { // submittedJobDesc = createJobSubmissionDescription(jsdl); // jobDesc = RSLHelper.readRSL(submittedJobDesc); // // } catch (RSLParseException e) { // // TODO Auto-generated catch block // e.printStackTrace(); // return null; // } // // /* // * Job test parameters (adjust to your needs) // */ // // remote host // //String contact = "ng2.vpac.org"; // // // Factory type: Fork, Condor, PBS, LSF // //String factoryType = ManagedJobFactoryConstants.FACTORY_TYPE.FORK; // // String factoryType = ManagedJobFactoryConstants.FACTORY_TYPE.PBS; // // // Deafult Security: Host authorization + XML encryption // Authorization authz = HostAuthorization.getInstance(); // Integer xmlSecurity = Constants.ENCRYPTION; // // // Submission mode: batch = will not wait // boolean batchMode = true; // // // a Simple command executable (if no job file) // String simpleJobCommandLine = null; // // // Job timeout values: duration, termination times // Date serviceDuration = null; // Date serviceTermination = null; // int timeout = GramJob.DEFAULT_TIMEOUT; // // String handle = null; // try { // // if ( credential == null || credential.getRemainingLifetime() < 1 ) { // throw new NoValidCredentialException("Credential is not valid."); // } // // GramClient gram = new GramClient(credential); // // handle = gram.submitRSL(getFactoryEPR(host,factoryType) // , simpleJobCommandLine, jobDesc // , authz, xmlSecurity // , batchMode, false, false // , serviceDuration, serviceTermination, timeout ); // // } catch (Exception e) { // //TODO handle that // e.printStackTrace(); // } // // //job.setSubmittedJobDescription(submittedJobDesc); // // myLogger.debug("Submitted 
rsl job // description:\n--------------------------------"); // myLogger.debug(submittedJobDesc); // // return handle; // } /* * (non-Javadoc) * * @see org.vpac.grisu.js.control.job.JobSubmitter#submit(java.lang.String, * org.vpac.grisu.js.model.Job) */ protected String submit(ServiceInterface serviceInterface, String host, String factoryType, Job job) throws ServerJobSubmissionException { JobDescriptionType jobDesc = null; String submittedJobDesc = null; try { // String site = informationManager.getSiteForHostOrUrl(host); submittedJobDesc = createJobSubmissionDescription(serviceInterface, job.getJobDescription()); jobDesc = RSLHelper.readRSL(submittedJobDesc); } catch (RSLParseException e) { // TODO Auto-generated catch block e.printStackTrace(); return null; } /* * Job test parameters (adjust to your needs) */ // remote host // String contact = "ng2.vpac.org"; // Factory type: Fork, Condor, PBS, LSF // String factoryType = ManagedJobFactoryConstants.FACTORY_TYPE.FORK; // String factoryType = ManagedJobFactoryConstants.FACTORY_TYPE.PBS; // Deafult Security: Host authorization + XML encryption Authorization authz = HostAuthorization.getInstance(); Integer xmlSecurity = Constants.ENCRYPTION; // Submission mode: batch = will not wait boolean batchMode = true; // a Simple command executable (if no job file) String simpleJobCommandLine = null; // Job timeout values: duration, termination times Date serviceDuration = null; Date serviceTermination = null; int timeout = GramJob.DEFAULT_TIMEOUT; String handle = null; try { GSSCredential credential = null; credential = CredentialHelpers.convertByteArrayToGSSCredential(job .getCredential().getCredentialData()); if (credential == null || credential.getRemainingLifetime() < 1) { throw new NoValidCredentialException( "Credential associated with job: " + job.getDn() + " / " + job.getJobname() + " is not valid."); } GramClient gram = new GramClient(credential); handle = gram.submitRSL(getFactoryEPR(host, factoryType), 
simpleJobCommandLine, jobDesc, authz, xmlSecurity, batchMode, false, false, serviceDuration, serviceTermination, timeout); } catch (Exception e) { // TODO handle that e.printStackTrace(); if (handle == null) { // TODO } } job.setSubmittedJobDescription(submittedJobDesc); // for debug purposes if (ServerPropertiesManager.getDebugModeOn()) { String uid = handle.substring(handle.indexOf("?") + 1); String hostname = host.substring(0, host .indexOf(":8443/wsrf/services/ManagedJobFactoryService")); String eprString = "<ns00:EndpointReferenceType xmlns:ns00=\"http://schemas.xmlsoap.org/ws/2004/03/addressing\">\n" + "<ns00:Address>" + hostname + ":8443/wsrf/services/ManagedExecutableJobService</ns00:Address>\n" + "<ns00:ReferenceProperties><ResourceID xmlns=\"http://www.globus.org/namespaces/2004/10/gram/job\">" + uid + "</ResourceID></ns00:ReferenceProperties>\n" + "<wsa:ReferenceParameters xmlns:wsa=\"http://schemas.xmlsoap.org/ws/2004/03/addressing\"/>\n" + "</ns00:EndpointReferenceType>"; try { myLogger.debug("Writing out epr file."); String vo = job.getFqan(); if (vo == null || "".equals(vo)) vo = "non_vo"; else vo = vo.replace("/", "_"); String uFileName = ServerPropertiesManager.getDebugDirectory() + "/" + job.getDn().replace("=", "_").replace(",", "_") .replace(" ", "_") + "_" + job.getJobname() + "_" + vo + "_" + job.hashCode(); FileWriter fileWriter = new FileWriter(uFileName + ".epr"); BufferedWriter buffWriter = new BufferedWriter(fileWriter); buffWriter.write(eprString); buffWriter.close(); FileWriter fileWriter2 = new FileWriter(uFileName + ".rsl"); buffWriter = new BufferedWriter(fileWriter2); buffWriter.write(submittedJobDesc); buffWriter.close(); FileWriter fileWriter3 = new FileWriter(uFileName + ".jsdl"); buffWriter = new BufferedWriter(fileWriter3); buffWriter.write(SeveralXMLHelpers .toStringWithoutAnnoyingExceptions(job .getJobDescription())); buffWriter.close(); } catch (Exception e) { e.printStackTrace(); } } myLogger .debug("Submitted rsl job 
description:\n--------------------------------"); myLogger.debug(submittedJobDesc); return handle; } static private EndpointReferenceType getFactoryEPR(String contact, String factoryType) throws Exception { URL factoryUrl = ManagedJobFactoryClientHelper.getServiceURL(contact) .getURL(); myLogger.debug("Factory Url: " + factoryUrl); return ManagedJobFactoryClientHelper.getFactoryEndpoint(factoryUrl, factoryType); } @Override public String getServerEndpoint(String server) { return "https://" + server + ":8443/wsrf/services/ManagedJobFactoryService"; } private int translateToGrisuStatus(String status) { int grisu_status = Integer.MIN_VALUE; if ("Done".equals(status)) { grisu_status = JobConstants.DONE; } else if (status.startsWith("Done")) { int error = Integer.parseInt(status.substring(4)); grisu_status = JobConstants.DONE + error; } else if ("StageIn".equals(status)) { grisu_status = JobConstants.STAGE_IN; } else if ("Pending".equals(status)) { grisu_status = JobConstants.PENDING; } else if ("Unsubmitted".equals(status)) { grisu_status = JobConstants.UNSUBMITTED; } else if ("Active".equals(status)) { grisu_status = JobConstants.ACTIVE; } else if ("CleanUp".equals(status)) { grisu_status = JobConstants.CLEAN_UP; } else if ("NoSuchJob".equals(status)) { grisu_status = JobConstants.NO_SUCH_JOB; } else if (status != null && status.startsWith("Failed")) { grisu_status = JobConstants.FAILED; } else { grisu_status = Integer.MAX_VALUE; } return grisu_status; } /* * (non-Javadoc) * * @see * org.vpac.grisu.js.control.job.JobSubmitter#getJobStatus(java.lang.String, * org.vpac.grisu.credential.model.ProxyCredential) */ public int getJobStatus(String endPointReference, ProxyCredential cred) { String status = null; int grisu_status = Integer.MIN_VALUE; status = GramClient.getJobStatus(endPointReference, cred .getGssCredential()); grisu_status = translateToGrisuStatus(status); return grisu_status; } /* * (non-Javadoc) * * @see 
org.vpac.grisu.js.control.job.JobSubmitter#killJob(java.lang.String, * org.vpac.grisu.credential.model.ProxyCredential) */ public int killJob(String endPointReference, ProxyCredential cred) { String status = null; int grisu_status = Integer.MIN_VALUE; status = GramClient.destroyJob(endPointReference, cred .getGssCredential()); grisu_status = translateToGrisuStatus(status); if (grisu_status == JobConstants.NO_SUCH_JOB) return JobConstants.KILLED; return grisu_status; } // public static void main (String[] args) { // // GT4Submitter submitter = new GT4Submitter(); // Document jsdl = SeveralXMLHelpers.loadXMLFile(new // File("/home/markus/Desktop/sleep.jsdl")); // // String rsl = submitter.createJobSubmissionDescription(jsdl); // // GSSCredential credential = null; // try { // GlobusCredential proxy = CredentialHelpers.loadGlobusCredential(new // File("/tmp/x509up_u1000")); // credential = CredentialHelpers.wrapGlobusCredential(proxy); // } catch (Exception e) { // // TODO Auto-generated catch block // e.printStackTrace(); // } // // // String handle = submitter.submit("ng2.sapac.edu.au", // ManagedJobFactoryConstants.FACTORY_TYPE.PBS, jsdl, credential); // // String handle = submitter.submit("ng2dev.vpac.monash.edu.au", // ManagedJobFactoryConstants.FACTORY_TYPE.PBS, jsdl, credential); // // String handle = submitter.submit("ng2dev.vpac.org", // ManagedJobFactoryConstants.FACTORY_TYPE.PBS, jsdl, credential); // // String handle = submitter.submit("ng2.hpcu.uq.edu.au", // ManagedJobFactoryConstants.FACTORY_TYPE.PBS, jsdl, credential); // System.out.println("Handle of job: "+handle); // // String uid = handle.substring(handle.indexOf("?")+1); // System.out.println("Uid: "+uid); // // String eprString = "<ns00:EndpointReferenceType // xmlns:ns00=\"http://schemas.xmlsoap.org/ws/2004/03/addressing\">\n<ns00:Address>https://ng2dev.vpac.org:8443/wsrf/services/ManagedExecutableJobService</ns00:Address>\n<ns00:ReferenceProperties><ResourceID // 
xmlns=\"http://www.globus.org/namespaces/2004/10/gram/job\">"+uid+"</ResourceID></ns00:ReferenceProperties>\n<wsa:ReferenceParameters // xmlns:wsa=\"http://schemas.xmlsoap.org/ws/2004/03/addressing\"/>\n</ns00:EndpointReferenceType>"; // // String eprString = "<ns00:EndpointReferenceType // xmlns:ns00=\"http://schemas.xmlsoap.org/ws/2004/03/addressing\"><ns00:Address>https://ng2.hpcu.uq.edu.au:8443/wsrf/services/ManagedExecutableJobService</ns00:Address><ns00:ReferenceProperties><ResourceID // xmlns=\"http://www.globus.org/namespaces/2004/10/gram/job\">"+uid+"</ResourceID></ns00:ReferenceProperties><wsa:ReferenceParameters // xmlns:wsa=\"http://schemas.xmlsoap.org/ws/2004/03/addressing\"/></ns00:EndpointReferenceType>"; // // String eprString = "<ns00:EndpointReferenceType // xmlns:ns00=\"http://schemas.xmlsoap.org/ws/2004/03/addressing\"><ns00:Address>https://ng2.sapac.edu.au:8443/wsrf/services/ManagedExecutableJobService</ns00:Address><ns00:ReferenceProperties><ResourceID // xmlns=\"http://www.globus.org/namespaces/2004/10/gram/job\">"+uid+"</ResourceID></ns00:ReferenceProperties><wsa:ReferenceParameters // xmlns:wsa=\"http://schemas.xmlsoap.org/ws/2004/03/addressing\"/></ns00:EndpointReferenceType>"; // String eprString = "<ns00:EndpointReferenceType // xmlns:ns00=\"http://schemas.xmlsoap.org/ws/2004/03/addressing\"><ns00:Address>https://ng2dev.vpac.monash.edu.au:8443/wsrf/services/ManagedExecutableJobService</ns00:Address><ns00:ReferenceProperties><ResourceID // xmlns=\"http://www.globus.org/namespaces/2004/10/gram/job\">"+uid+"</ResourceID></ns00:ReferenceProperties><wsa:ReferenceParameters // xmlns:wsa=\"http://schemas.xmlsoap.org/ws/2004/03/addressing\"/></ns00:EndpointReferenceType>"; // // try { // FileWriter fileWriter = new // FileWriter("/home/markus/Desktop/test44.epr"); // BufferedWriter buffWriter = new BufferedWriter(fileWriter); // buffWriter.write(eprString); // // buffWriter.close(); // } catch (Exception e) { // e.printStackTrace(); // } // // } 
}
true
true
private String createJobSubmissionDescription( ServiceInterface serviceInterface, Document jsdl) throws ServerJobSubmissionException { DebugUtils.jsdlDebugOutput("Before translating into rsl: ", jsdl); Document output = null; try { DocumentBuilderFactory docFactory = DocumentBuilderFactory .newInstance(); DocumentBuilder docBuilder = docFactory.newDocumentBuilder(); output = docBuilder.newDocument(); } catch (ParserConfigurationException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } // Add root element Element job = output.createElement("job"); output.appendChild(job); // Add "executable" node Element executable = output.createElement("executable"); executable.setTextContent(JsdlHelpers.getPosixApplication(jsdl)); job.appendChild(executable); // Add "argument"s String[] arguments = JsdlHelpers.getPosixApplicationArguments(jsdl); for (String argument : arguments) { if (argument != null && !"".equals(argument.trim())) { Element argument_node = output.createElement("argument"); argument_node.setTextContent(argument); job.appendChild(argument_node); } } // Add "directory" Element directory = output.createElement("directory"); directory.setTextContent(JsdlHelpers.getWorkingDirectory(jsdl)); job.appendChild(directory); // "stdin" element if available String stdinValue = JsdlHelpers.getPosixStandardInput(jsdl); if (stdinValue != null && !"".equals(stdinValue)) { Element stdin = output.createElement("stdin"); stdin.setTextContent(stdinValue); job.appendChild(stdin); } // Add "stdout" Element stdout = output.createElement("stdout"); stdout.setTextContent(JsdlHelpers.getPosixStandardOutput(jsdl)); job.appendChild(stdout); // Add "stderr" Element stderr = output.createElement("stderr"); stderr.setTextContent(JsdlHelpers.getPosixStandardError(jsdl)); job.appendChild(stderr); // Add "queue" node // TODO change that once I know how to specify queues in jsdl String queue = JsdlHelpers.getCandidateHosts(jsdl)[0]; // TODO this // always uses // the first // 
candidate // host - not // good if (queue.indexOf(":") != -1) { queue = queue.substring(0, queue.indexOf(":")); Element queue_node = output.createElement("queue"); queue_node.setTextContent(queue); job.appendChild(queue_node); } // Add "jobtype" if mpi int processorCount = JsdlHelpers.getProcessorCount(jsdl); Element jobType = output.createElement("jobType"); String jobTypeString = JsdlHelpers.getJobType(jsdl); if (processorCount > 1) { Element count = output.createElement("count"); count.setTextContent(new Integer(processorCount).toString()); job.appendChild(count); if (jobTypeString == null) { jobType.setTextContent("mpi"); } else { jobType.setTextContent(jobTypeString); } } else { if (jobTypeString == null) { jobType.setTextContent("single"); } else { jobType.setTextContent(jobTypeString); } } job.appendChild(jobType); // total memory Long memory = JsdlHelpers.getTotalMemoryRequirement(jsdl); if (memory != null && memory >= 0) { Element totalMemory = output.createElement("maxMemory"); // convert from bytes to mb memory = memory / 1024; totalMemory.setTextContent(memory.toString()); job.appendChild(totalMemory); } // Add "maxWallTime" node int walltime = JsdlHelpers.getWalltime(jsdl); if (walltime > 0) { Element maxWallTime = output.createElement("maxWallTime"); int wt = new Integer(JsdlHelpers.getWalltime(jsdl)); // convert to minutes wt = wt / 60; maxWallTime.setTextContent(new Integer(wt).toString()); job.appendChild(maxWallTime); } Element fileStageIn = output.createElement("fileStageIn"); // stage ins // Map<String, String> stageIns = JsdlHelpers.getStageIns(jsdl); // // only append stageIns element if not 0 because globus will reject // the job // if there is an empyt <stageIns> tag // if ( stageIns.size() > 0 ) { // for ( String source : stageIns.keySet() ) { // Element stageIn = output.createElement("transfer"); // Element sourceURL = output.createElement("sourceUrl"); // sourceURL.setTextContent(source); // stageIn.appendChild(sourceURL); // Element 
targetURL = output.createElement("destinationUrl"); // targetURL.setTextContent(stageIns.get(source)); // stageIn.appendChild(targetURL); // // fileStageIn.appendChild(stageIn); // } // job.appendChild(fileStageIn); // } // Extensions Element extensions = output.createElement("extensions"); // jobname Element jobname = output.createElement("jobname"); String jobname_string = JsdlHelpers.getJobname(jsdl); // because of some pbs restrictions we have to keep the jobname to 6 // chars if (jobname_string.length() > 6) { jobname.setTextContent(jobname_string.substring(jobname_string .length() - 6)); } // jobname.setTextContent(jobname_string); extensions.appendChild(jobname); // module -- old style String[] modules_string = null; try { modules_string = JsdlHelpers.getModules(jsdl); } catch (Exception e) { // doesn't matter } if (modules_string != null && modules_string.length == 0) { for (String module_string : modules_string) { if (!"".equals(module_string)) { Element module = output.createElement("module"); module.setTextContent(module_string); extensions.appendChild(module); } } } else { // try to determine module to load from mds -- this will be the // default way of doing it later on and the module element will // disappear // it was stupid in the first place to have it... String application = JsdlHelpers.getApplicationName(jsdl); String version = JsdlHelpers.getApplicationVersion(jsdl); String subLoc = JsdlHelpers.getCandidateHosts(jsdl)[0]; if (application != null || version != null || subLoc != null) { Map<String, String> appDetails = serviceInterface .getApplicationDetails(application, version, subLoc); String modulesString = appDetails .get(JobConstants.MDS_MODULES_KEY); if ( modules_string == null || "".equals(modules_string) ) { myLogger.warn("No module for this application/version/submissionLocation found. 
Submitting nonetheless..."); } if (modulesString != null && modulesString.length() > 0) { modules_string = appDetails.get( JobConstants.MDS_MODULES_KEY).split(","); for (String module_string : modules_string) { if (!"".equals(module_string)) { Element module = output.createElement("module"); module.setTextContent(module_string); extensions.appendChild(module); } } } } else { throw new ServerJobSubmissionException("Can't determine either application, version or submissionLocation."); } } // email String email = JsdlHelpers.getEmail(jsdl); if (email != null && !"".equals(email)) { Element email_address = output.createElement("email_address"); email_address.setTextContent(email); extensions.appendChild(email_address); if (JsdlHelpers.sendEmailOnJobStart(jsdl)) { Element emailonexecution = output .createElement("emailonexecution"); emailonexecution.setTextContent("yes"); extensions.appendChild(emailonexecution); } if (JsdlHelpers.sendEmailOnJobFinish(jsdl)) { Element emailonabort = output.createElement("emailonabort"); emailonabort.setTextContent("yes"); Element emailontermination = output .createElement("emailontermination"); emailontermination.setTextContent("yes"); extensions.appendChild(emailonabort); extensions.appendChild(emailontermination); } } job.appendChild(extensions); // initialize StreamResult with InputFile object to save to file StreamResult result = null; try { Transformer transformer = TransformerFactory.newInstance() .newTransformer(); transformer.setOutputProperty(OutputKeys.INDENT, "yes"); result = new StreamResult(new StringWriter()); DOMSource source = new DOMSource(output); transformer.transform(source, result); } catch (TransformerConfigurationException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (IllegalArgumentException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (TransformerFactoryConfigurationError e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (TransformerException e) 
{ // TODO Auto-generated catch block e.printStackTrace(); } return result.getWriter().toString(); }
private String createJobSubmissionDescription( ServiceInterface serviceInterface, Document jsdl) throws ServerJobSubmissionException { DebugUtils.jsdlDebugOutput("Before translating into rsl: ", jsdl); Document output = null; try { DocumentBuilderFactory docFactory = DocumentBuilderFactory .newInstance(); DocumentBuilder docBuilder = docFactory.newDocumentBuilder(); output = docBuilder.newDocument(); } catch (ParserConfigurationException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } // Add root element Element job = output.createElement("job"); output.appendChild(job); // Add "executable" node Element executable = output.createElement("executable"); executable.setTextContent(JsdlHelpers.getPosixApplication(jsdl)); job.appendChild(executable); // Add "argument"s String[] arguments = JsdlHelpers.getPosixApplicationArguments(jsdl); for (String argument : arguments) { if (argument != null && !"".equals(argument.trim())) { Element argument_node = output.createElement("argument"); argument_node.setTextContent(argument); job.appendChild(argument_node); } } // Add "directory" Element directory = output.createElement("directory"); directory.setTextContent(JsdlHelpers.getWorkingDirectory(jsdl)); job.appendChild(directory); // "stdin" element if available String stdinValue = JsdlHelpers.getPosixStandardInput(jsdl); if (stdinValue != null && !"".equals(stdinValue)) { Element stdin = output.createElement("stdin"); stdin.setTextContent(stdinValue); job.appendChild(stdin); } // Add "stdout" Element stdout = output.createElement("stdout"); stdout.setTextContent(JsdlHelpers.getPosixStandardOutput(jsdl)); job.appendChild(stdout); // Add "stderr" Element stderr = output.createElement("stderr"); stderr.setTextContent(JsdlHelpers.getPosixStandardError(jsdl)); job.appendChild(stderr); // Add "queue" node // TODO change that once I know how to specify queues in jsdl String queue = JsdlHelpers.getCandidateHosts(jsdl)[0]; // TODO this // always uses // the first // 
candidate // host - not // good if (queue.indexOf(":") != -1) { queue = queue.substring(0, queue.indexOf(":")); Element queue_node = output.createElement("queue"); queue_node.setTextContent(queue); job.appendChild(queue_node); } // Add "jobtype" if mpi int processorCount = JsdlHelpers.getProcessorCount(jsdl); Element jobType = output.createElement("jobType"); String jobTypeString = JsdlHelpers.getJobType(jsdl); if (processorCount > 1) { Element count = output.createElement("count"); count.setTextContent(new Integer(processorCount).toString()); job.appendChild(count); if (jobTypeString == null) { jobType.setTextContent("mpi"); } else { jobType.setTextContent(jobTypeString); } } else { if (jobTypeString == null) { jobType.setTextContent("single"); } else { jobType.setTextContent(jobTypeString); } } job.appendChild(jobType); // total memory Long memory = JsdlHelpers.getTotalMemoryRequirement(jsdl); if (memory != null && memory >= 0) { Element totalMemory = output.createElement("maxMemory"); // convert from bytes to mb memory = memory / 1024; totalMemory.setTextContent(memory.toString()); job.appendChild(totalMemory); } // Add "maxWallTime" node int walltime = JsdlHelpers.getWalltime(jsdl); if (walltime > 0) { Element maxWallTime = output.createElement("maxWallTime"); int wt = new Integer(JsdlHelpers.getWalltime(jsdl)); // convert to minutes wt = wt / 60; maxWallTime.setTextContent(new Integer(wt).toString()); job.appendChild(maxWallTime); } Element fileStageIn = output.createElement("fileStageIn"); // stage ins // Map<String, String> stageIns = JsdlHelpers.getStageIns(jsdl); // // only append stageIns element if not 0 because globus will reject // the job // if there is an empyt <stageIns> tag // if ( stageIns.size() > 0 ) { // for ( String source : stageIns.keySet() ) { // Element stageIn = output.createElement("transfer"); // Element sourceURL = output.createElement("sourceUrl"); // sourceURL.setTextContent(source); // stageIn.appendChild(sourceURL); // Element 
targetURL = output.createElement("destinationUrl"); // targetURL.setTextContent(stageIns.get(source)); // stageIn.appendChild(targetURL); // // fileStageIn.appendChild(stageIn); // } // job.appendChild(fileStageIn); // } // Extensions Element extensions = output.createElement("extensions"); // jobname Element jobname = output.createElement("jobname"); String jobname_string = JsdlHelpers.getJobname(jsdl); // because of some pbs restrictions we have to keep the jobname to 6 // chars if (jobname_string.length() > 6) { jobname.setTextContent(jobname_string.substring(jobname_string .length() - 6)); } // jobname.setTextContent(jobname_string); extensions.appendChild(jobname); // module -- old style String[] modules_string = null; try { modules_string = JsdlHelpers.getModules(jsdl); } catch (Exception e) { // doesn't matter } if (modules_string != null && modules_string.length > 0) { for (String module_string : modules_string) { if (!"".equals(module_string)) { Element module = output.createElement("module"); module.setTextContent(module_string); extensions.appendChild(module); } } } else { // try to determine module to load from mds -- this will be the // default way of doing it later on and the module element will // disappear // it was stupid in the first place to have it... String application = JsdlHelpers.getApplicationName(jsdl); String version = JsdlHelpers.getApplicationVersion(jsdl); String subLoc = JsdlHelpers.getCandidateHosts(jsdl)[0]; if (application != null || version != null || subLoc != null) { Map<String, String> appDetails = serviceInterface .getApplicationDetails(application, version, subLoc); String modulesString = appDetails .get(JobConstants.MDS_MODULES_KEY); if ( modules_string == null || "".equals(modules_string) ) { myLogger.warn("No module for this application/version/submissionLocation found. 
Submitting nonetheless..."); } if (modulesString != null && modulesString.length() > 0) { modules_string = appDetails.get( JobConstants.MDS_MODULES_KEY).split(","); for (String module_string : modules_string) { if (!"".equals(module_string)) { Element module = output.createElement("module"); module.setTextContent(module_string); extensions.appendChild(module); } } } } else { throw new ServerJobSubmissionException("Can't determine either application, version or submissionLocation."); } } // email String email = JsdlHelpers.getEmail(jsdl); if (email != null && !"".equals(email)) { Element email_address = output.createElement("email_address"); email_address.setTextContent(email); extensions.appendChild(email_address); if (JsdlHelpers.sendEmailOnJobStart(jsdl)) { Element emailonexecution = output .createElement("emailonexecution"); emailonexecution.setTextContent("yes"); extensions.appendChild(emailonexecution); } if (JsdlHelpers.sendEmailOnJobFinish(jsdl)) { Element emailonabort = output.createElement("emailonabort"); emailonabort.setTextContent("yes"); Element emailontermination = output .createElement("emailontermination"); emailontermination.setTextContent("yes"); extensions.appendChild(emailonabort); extensions.appendChild(emailontermination); } } job.appendChild(extensions); // initialize StreamResult with InputFile object to save to file StreamResult result = null; try { Transformer transformer = TransformerFactory.newInstance() .newTransformer(); transformer.setOutputProperty(OutputKeys.INDENT, "yes"); result = new StreamResult(new StringWriter()); DOMSource source = new DOMSource(output); transformer.transform(source, result); } catch (TransformerConfigurationException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (IllegalArgumentException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (TransformerFactoryConfigurationError e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (TransformerException e) 
{ // TODO Auto-generated catch block e.printStackTrace(); } return result.getWriter().toString(); }
diff --git a/src/gov/nih/nci/rembrandt/web/xml/ClinicalSampleReport.java b/src/gov/nih/nci/rembrandt/web/xml/ClinicalSampleReport.java index adcf4fe1..d8d724b1 100755 --- a/src/gov/nih/nci/rembrandt/web/xml/ClinicalSampleReport.java +++ b/src/gov/nih/nci/rembrandt/web/xml/ClinicalSampleReport.java @@ -1,800 +1,804 @@ package gov.nih.nci.rembrandt.web.xml; import gov.nih.nci.rembrandt.queryservice.resultset.DimensionalViewContainer; import gov.nih.nci.rembrandt.queryservice.resultset.Resultant; import gov.nih.nci.rembrandt.queryservice.resultset.ResultsContainer; import gov.nih.nci.rembrandt.queryservice.resultset.sample.SampleResultset; import gov.nih.nci.rembrandt.queryservice.resultset.sample.SampleViewResultsContainer; import gov.nih.nci.rembrandt.util.DEUtils; import java.text.DecimalFormat; import java.util.Collection; import java.util.Iterator; import java.util.Map; import org.apache.log4j.Logger; import org.dom4j.Document; import org.dom4j.DocumentHelper; import org.dom4j.Element; /** * @author LandyR * Feb 8, 2005 * */ public class ClinicalSampleReport implements ReportGenerator { /** * */ public ClinicalSampleReport () { super(); } /* (non-Javadoc) * @see gov.nih.nci.nautilus.ui.report.ReportGenerator#getTemplate(gov.nih.nci.nautilus.resultset.Resultant, java.lang.String) */ public Document getReportXML(Resultant resultant, Map filterMapParams) { //String theColors[] = { "B6C5F2","F2E3B5","DAE1F9","C4F2B5","819BE9", "E9CF81" }; DecimalFormat resultFormat = new DecimalFormat("0.0000"); String defaultV = "-"; Document document = DocumentHelper.createDocument(); try { Element report = document.addElement( "Report" ); Element cell = null; Element data = null; Element dataRow = null; //add the atts report.addAttribute("reportType", "Clinical"); //fudge these for now report.addAttribute("groupBy", "none"); String queryName = resultant.getAssociatedQuery().getQueryName(); //set the queryName to be unique for session/cache access report.addAttribute("queryName", 
queryName); report.addAttribute("sessionId", "the session id"); report.addAttribute("creationTime", "right now"); boolean gLinks = false; boolean cLinks = false; StringBuffer sb = new StringBuffer(); ResultsContainer resultsContainer = resultant.getResultsContainer(); SampleViewResultsContainer sampleViewContainer = null; if(resultsContainer instanceof DimensionalViewContainer) { DimensionalViewContainer dimensionalViewContainer = (DimensionalViewContainer) resultsContainer; // Are we making hyperlinks? if(dimensionalViewContainer.getGeneExprSingleViewContainer() != null) { // show the geneExprHyperlinks gLinks = true; } if(dimensionalViewContainer.getCopyNumberSingleViewContainer() != null) { // show the copyNumberHyperlinks cLinks = true; } sampleViewContainer = dimensionalViewContainer.getSampleViewResultsContainer(); } else if (resultsContainer instanceof SampleViewResultsContainer) { sampleViewContainer = (SampleViewResultsContainer) resultsContainer; } Collection samples = sampleViewContainer.getSampleResultsets(); /* sb.append("<div class=\"rowCount\">"+helpFul+samples.size()+" records returned &nbsp;&nbsp;&nbsp;" + links + "</div>\n"); sb.append("<table cellpadding=\"0\" cellspacing=\"0\">\n"); */ // set up the headers for this table Element headerRow = report.addElement("Row").addAttribute("name", "headerRow"); cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Sample"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Age at Dx (years)"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = 
cell.addElement("Data").addAttribute("type", "header").addText("Gender"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Survival (months)"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Disease"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Grade"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Race"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Karnofsky"); data = null; cell = null; /* cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Lansky"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Neurological Exam Outcome"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", 
"header").addText("MRI Desc"); data = null; cell = null; /* cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Clinical Evaluation Time Point"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Followup Date"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Followup Month"); data = null; cell = null; /* cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Neuro Evaluation Date"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Steroid Dose Status"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Anti-Convulsant Status"); data = null; cell = null; /* cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Time Point"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = 
cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Site"); data = null; cell = null; /*cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Dose Start Date"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Dose Stop Date"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Fraction Dose"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Fraction Number"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Type"); data = null; cell = null; /*cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Time Point"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Agent ID"); data = null; cell = null; 
*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Agent Name"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Course Count"); data = null; cell = null; /* cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Dose Start Date"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Dose Stop Date"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Study Source"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Protocol Number"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Time Point"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", 
"header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Procedure Title"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Tumor Histology"); data = null; cell = null; /* cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Date"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Outcome"); data = null; cell = null; // starting onstudy areas /* cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Time Point"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Site"); data = null; cell = null; /* cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Dose Start Date"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy 
Therapy Radiation Dose Stop Date"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Neurosis Status"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Fraction Dose"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Fraction Number"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Type"); data = null; cell = null; /*cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Time Point"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Agent ID"); data = null; cell = null; */ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Agent Name"); data = null; cell = null; /* cell = 
headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Regimen Number"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Course Count"); data = null; cell = null; /*cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Dose Start Date"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Dose Stop Date"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Study Source"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Protocol Number"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Time Point"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", 
"header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Procedure Title"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Indication "); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Histo Diagnosis "); data = null; cell = null; /*cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Date"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Outcome"); data = null; cell = null; //sb.append("<Tr><Td id=\"header\">SAMPLE</td><td id=\"header\">AGE at Dx (years)</td><td id=\"header\">GENDER</td><td id=\"header\">SURVIVAL (months)</td><td id=\"header\">DISEASE</td>"); Iterator si = samples.iterator(); if(si.hasNext()) { SampleResultset sampleResultset = (SampleResultset)si.next(); if(sampleResultset.getGeneExprSingleViewResultsContainer() != null) { cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("GeneExp"); data = null; cell = null; //sb.append("<Td id=\"header\">GeneExp</td>"); } if(sampleResultset.getCopyNumberSingleViewResultsContainer()!= null) { cell = 
headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("CopyNumber"); data = null; cell = null; //sb.append("<td id=\"header\">CopyNumber</td>"); } //sb.append("</tr>\n"); } for (Iterator sampleIterator = samples.iterator(); sampleIterator.hasNext();) { SampleResultset sampleResultset = (SampleResultset)sampleIterator.next(); dataRow = report.addElement("Row").addAttribute("name", "dataRow"); cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "sample").addAttribute("group", "sample"); data = cell.addElement("Data").addAttribute("type", "data").addText(sampleResultset.getSampleIDDE().getValue().toString()); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getAgeGroup())); data = null; cell = null; String theGender = defaultV; if(!DEUtils.checkNV(sampleResultset.getGenderCode()).equalsIgnoreCase("O")) theGender = DEUtils.checkNV(sampleResultset.getGenderCode()); cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(theGender); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getSurvivalLengthRange())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getDisease())); 
data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getWhoGrade())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getRaceDE())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getKarnofskyClinicalEvalDE())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getLanskyClinicalEvalDE())); data = null; cell = null;*/ /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getNeuroExamClinicalEvalDE())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getMriClinicalEvalDE())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getNeuroExamDescs())); data = null; cell = null; cell = 
dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getMriScoreDescs())); data = null; cell = null; /* cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getTimePoints())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getFollowupDates())); data = null; cell = null; */ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getFollowupMonths())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getNeuroEvaluationDates())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getSteroidDoseStatuses())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getAntiConvulsantStatuses())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", 
"data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationTimePoints())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationRadiationSites())); data = null; cell = null; /* cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationDoseStartDates())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationDoseStopDates())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationFractionDoses())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationFractionNumbers())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationRadiationTypes())); data = null; cell = null; /*cell = 
dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoTimePoints())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoagentIds())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoAgentNames())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoCourseCounts())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoDoseStartDates())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoDoseStopDates())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoStudySources())); data = null; cell = null; cell = 
dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoProtocolNumbers())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgeryTimePoints())); data = null; cell = null; */ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgeryProcedureTitles())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgeryTumorHistologys())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgerySurgeryDates())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgerySurgeryOutcomes())); data = null; cell = null; // starting onstudy /* cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", 
"data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationTimePoints())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationRadiationSites())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationDoseStartDates())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationDoseStopDates())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationFractionDoses())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationFractionNumbers())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationNeurosisStatuses())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = 
cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationRadiationTypes())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoTimePoints())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoagentIds())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoAgentNames())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoRegimenNumbers())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoCourseCounts())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoDoseStartDates())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); 
data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoDoseStopDates())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoStudySources())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoProtocolNumbers())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgeryTimePoints())); data = null; cell = null; */ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgeryProcedureTitles())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgeryIndications())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgeryHistoDiagnoses())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", 
"data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgerySurgeryDates())); data = null; cell = null; */ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgerySurgeryOutcomes())); data = null; cell = null; /* sb.append("<tr><td>"+sampleResultset.getBiospecimen().getValue().toString().substring(2)+ "</td>" + "<Td>"+sampleResultset.getAgeGroup().getValue()+ "</td>" + "<td>"+sampleResultset.getGenderCode().getValue()+ "</td>" + "<td>"+sampleResultset.getSurvivalLengthRange().getValue()+ "</td>" + "<Td>"+sampleResultset.getDisease().getValue() + "</td>"); */ if(sampleResultset.getGeneExprSingleViewResultsContainer() != null) { //TODO: create the links cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText("G"); data = null; cell = null; //sb.append("<td><a href=\"report.do?s="+sampleName+"_gene&report=gene\">G</a></td>"); } + /* else if (gLinks){ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); - data = cell.addElement("Data").addAttribute("type", "data").addText(" "); + data = cell.addElement("Data").addAttribute("type", "data").addText("-"); data = null; cell = null; //sb.append("<td>&nbsp;</td>"); //empty cell } + */ if(sampleResultset.getCopyNumberSingleViewResultsContainer()!= null) { // TODO: create the links cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText("C"); data = null; cell = null; //sb.append("<Td><a 
href=\"report.do?s="+sampleName +"_copy&report=copy\">C</a></td>"); } + /* else if (cLinks){ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); - data = cell.addElement("Data").addAttribute("type", "data").addText(" "); + data = cell.addElement("Data").addAttribute("type", "data").addText("-"); data = null; cell = null; //sb.append("<td>&nbsp;</td>"); //empty cell } + */ //report.append("row", row); //sb.append("</tr>\n"); } //sb.append("</table>\n<br>"); //return sb.toString(); } catch(Exception e) { //asdf System.out.println(e); } return document; } }
false
true
public Document getReportXML(Resultant resultant, Map filterMapParams) { //String theColors[] = { "B6C5F2","F2E3B5","DAE1F9","C4F2B5","819BE9", "E9CF81" }; DecimalFormat resultFormat = new DecimalFormat("0.0000"); String defaultV = "-"; Document document = DocumentHelper.createDocument(); try { Element report = document.addElement( "Report" ); Element cell = null; Element data = null; Element dataRow = null; //add the atts report.addAttribute("reportType", "Clinical"); //fudge these for now report.addAttribute("groupBy", "none"); String queryName = resultant.getAssociatedQuery().getQueryName(); //set the queryName to be unique for session/cache access report.addAttribute("queryName", queryName); report.addAttribute("sessionId", "the session id"); report.addAttribute("creationTime", "right now"); boolean gLinks = false; boolean cLinks = false; StringBuffer sb = new StringBuffer(); ResultsContainer resultsContainer = resultant.getResultsContainer(); SampleViewResultsContainer sampleViewContainer = null; if(resultsContainer instanceof DimensionalViewContainer) { DimensionalViewContainer dimensionalViewContainer = (DimensionalViewContainer) resultsContainer; // Are we making hyperlinks? 
if(dimensionalViewContainer.getGeneExprSingleViewContainer() != null) { // show the geneExprHyperlinks gLinks = true; } if(dimensionalViewContainer.getCopyNumberSingleViewContainer() != null) { // show the copyNumberHyperlinks cLinks = true; } sampleViewContainer = dimensionalViewContainer.getSampleViewResultsContainer(); } else if (resultsContainer instanceof SampleViewResultsContainer) { sampleViewContainer = (SampleViewResultsContainer) resultsContainer; } Collection samples = sampleViewContainer.getSampleResultsets(); /* sb.append("<div class=\"rowCount\">"+helpFul+samples.size()+" records returned &nbsp;&nbsp;&nbsp;" + links + "</div>\n"); sb.append("<table cellpadding=\"0\" cellspacing=\"0\">\n"); */ // set up the headers for this table Element headerRow = report.addElement("Row").addAttribute("name", "headerRow"); cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Sample"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Age at Dx (years)"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Gender"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Survival (months)"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", 
"header").addText("Disease"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Grade"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Race"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Karnofsky"); data = null; cell = null; /* cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Lansky"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Neurological Exam Outcome"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("MRI Desc"); data = null; cell = null; /* cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Clinical Evaluation Time Point"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Followup 
Date"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Followup Month"); data = null; cell = null; /* cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Neuro Evaluation Date"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Steroid Dose Status"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Anti-Convulsant Status"); data = null; cell = null; /* cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Time Point"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Site"); data = null; cell = null; /*cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Dose Start Date"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = 
cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Dose Stop Date"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Fraction Dose"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Fraction Number"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Type"); data = null; cell = null; /*cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Time Point"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Agent ID"); data = null; cell = null; */ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Agent Name"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Course Count"); data = null; cell = null; /* 
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Dose Start Date"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Dose Stop Date"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Study Source"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Protocol Number"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Time Point"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Procedure Title"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Tumor Histology"); data = null; cell = null; /* cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", 
"header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Date"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Outcome"); data = null; cell = null; // starting onstudy areas /* cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Time Point"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Site"); data = null; cell = null; /* cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Dose Start Date"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Dose Stop Date"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Neurosis Status"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", 
"header").addText("OnStudy Therapy Radiation Fraction Dose"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Fraction Number"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Type"); data = null; cell = null; /*cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Time Point"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Agent ID"); data = null; cell = null; */ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Agent Name"); data = null; cell = null; /* cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Regimen Number"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Course Count"); data = null; cell = null; /*cell = 
headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Dose Start Date"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Dose Stop Date"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Study Source"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Protocol Number"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Time Point"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Procedure Title"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Indication "); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", 
"header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Histo Diagnosis "); data = null; cell = null; /*cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Date"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Outcome"); data = null; cell = null; //sb.append("<Tr><Td id=\"header\">SAMPLE</td><td id=\"header\">AGE at Dx (years)</td><td id=\"header\">GENDER</td><td id=\"header\">SURVIVAL (months)</td><td id=\"header\">DISEASE</td>"); Iterator si = samples.iterator(); if(si.hasNext()) { SampleResultset sampleResultset = (SampleResultset)si.next(); if(sampleResultset.getGeneExprSingleViewResultsContainer() != null) { cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("GeneExp"); data = null; cell = null; //sb.append("<Td id=\"header\">GeneExp</td>"); } if(sampleResultset.getCopyNumberSingleViewResultsContainer()!= null) { cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("CopyNumber"); data = null; cell = null; //sb.append("<td id=\"header\">CopyNumber</td>"); } //sb.append("</tr>\n"); } for (Iterator sampleIterator = samples.iterator(); sampleIterator.hasNext();) { SampleResultset sampleResultset = (SampleResultset)sampleIterator.next(); dataRow = report.addElement("Row").addAttribute("name", "dataRow"); cell = 
dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "sample").addAttribute("group", "sample"); data = cell.addElement("Data").addAttribute("type", "data").addText(sampleResultset.getSampleIDDE().getValue().toString()); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getAgeGroup())); data = null; cell = null; String theGender = defaultV; if(!DEUtils.checkNV(sampleResultset.getGenderCode()).equalsIgnoreCase("O")) theGender = DEUtils.checkNV(sampleResultset.getGenderCode()); cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(theGender); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getSurvivalLengthRange())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getDisease())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getWhoGrade())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getRaceDE())); data = null; 
cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getKarnofskyClinicalEvalDE())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getLanskyClinicalEvalDE())); data = null; cell = null;*/ /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getNeuroExamClinicalEvalDE())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getMriClinicalEvalDE())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getNeuroExamDescs())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getMriScoreDescs())); data = null; cell = null; /* cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getTimePoints())); data = null; cell = null; cell = 
dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getFollowupDates())); data = null; cell = null; */ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getFollowupMonths())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getNeuroEvaluationDates())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getSteroidDoseStatuses())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getAntiConvulsantStatuses())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationTimePoints())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationRadiationSites())); data = null; cell = null; /* cell = 
dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationDoseStartDates())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationDoseStopDates())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationFractionDoses())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationFractionNumbers())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationRadiationTypes())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoTimePoints())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoagentIds())); data = null; cell = 
null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoAgentNames())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoCourseCounts())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoDoseStartDates())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoDoseStopDates())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoStudySources())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoProtocolNumbers())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgeryTimePoints())); data = null; cell = null; */ 
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgeryProcedureTitles())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgeryTumorHistologys())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgerySurgeryDates())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgerySurgeryOutcomes())); data = null; cell = null; // starting onstudy /* cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationTimePoints())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationRadiationSites())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", 
"data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationDoseStartDates())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationDoseStopDates())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationFractionDoses())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationFractionNumbers())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationNeurosisStatuses())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationRadiationTypes())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoTimePoints())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = 
cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoagentIds())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoAgentNames())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoRegimenNumbers())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoCourseCounts())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoDoseStartDates())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoDoseStopDates())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoStudySources())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data 
= cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoProtocolNumbers())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgeryTimePoints())); data = null; cell = null; */ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgeryProcedureTitles())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgeryIndications())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgeryHistoDiagnoses())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgerySurgeryDates())); data = null; cell = null; */ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgerySurgeryOutcomes())); data = null; cell = null; /* sb.append("<tr><td>"+sampleResultset.getBiospecimen().getValue().toString().substring(2)+ "</td>" + 
"<Td>"+sampleResultset.getAgeGroup().getValue()+ "</td>" + "<td>"+sampleResultset.getGenderCode().getValue()+ "</td>" + "<td>"+sampleResultset.getSurvivalLengthRange().getValue()+ "</td>" + "<Td>"+sampleResultset.getDisease().getValue() + "</td>"); */ if(sampleResultset.getGeneExprSingleViewResultsContainer() != null) { //TODO: create the links cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText("G"); data = null; cell = null; //sb.append("<td><a href=\"report.do?s="+sampleName+"_gene&report=gene\">G</a></td>"); } else if (gLinks){ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(" "); data = null; cell = null; //sb.append("<td>&nbsp;</td>"); //empty cell } if(sampleResultset.getCopyNumberSingleViewResultsContainer()!= null) { // TODO: create the links cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText("C"); data = null; cell = null; //sb.append("<Td><a href=\"report.do?s="+sampleName +"_copy&report=copy\">C</a></td>"); } else if (cLinks){ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(" "); data = null; cell = null; //sb.append("<td>&nbsp;</td>"); //empty cell } //report.append("row", row); //sb.append("</tr>\n"); } //sb.append("</table>\n<br>"); //return sb.toString(); } catch(Exception e) { //asdf System.out.println(e); } return document; }
public Document getReportXML(Resultant resultant, Map filterMapParams) { //String theColors[] = { "B6C5F2","F2E3B5","DAE1F9","C4F2B5","819BE9", "E9CF81" }; DecimalFormat resultFormat = new DecimalFormat("0.0000"); String defaultV = "-"; Document document = DocumentHelper.createDocument(); try { Element report = document.addElement( "Report" ); Element cell = null; Element data = null; Element dataRow = null; //add the atts report.addAttribute("reportType", "Clinical"); //fudge these for now report.addAttribute("groupBy", "none"); String queryName = resultant.getAssociatedQuery().getQueryName(); //set the queryName to be unique for session/cache access report.addAttribute("queryName", queryName); report.addAttribute("sessionId", "the session id"); report.addAttribute("creationTime", "right now"); boolean gLinks = false; boolean cLinks = false; StringBuffer sb = new StringBuffer(); ResultsContainer resultsContainer = resultant.getResultsContainer(); SampleViewResultsContainer sampleViewContainer = null; if(resultsContainer instanceof DimensionalViewContainer) { DimensionalViewContainer dimensionalViewContainer = (DimensionalViewContainer) resultsContainer; // Are we making hyperlinks? 
if(dimensionalViewContainer.getGeneExprSingleViewContainer() != null) { // show the geneExprHyperlinks gLinks = true; } if(dimensionalViewContainer.getCopyNumberSingleViewContainer() != null) { // show the copyNumberHyperlinks cLinks = true; } sampleViewContainer = dimensionalViewContainer.getSampleViewResultsContainer(); } else if (resultsContainer instanceof SampleViewResultsContainer) { sampleViewContainer = (SampleViewResultsContainer) resultsContainer; } Collection samples = sampleViewContainer.getSampleResultsets(); /* sb.append("<div class=\"rowCount\">"+helpFul+samples.size()+" records returned &nbsp;&nbsp;&nbsp;" + links + "</div>\n"); sb.append("<table cellpadding=\"0\" cellspacing=\"0\">\n"); */ // set up the headers for this table Element headerRow = report.addElement("Row").addAttribute("name", "headerRow"); cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Sample"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Age at Dx (years)"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Gender"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Survival (months)"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", 
"header").addText("Disease"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Grade"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Race"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Karnofsky"); data = null; cell = null; /* cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Lansky"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Neurological Exam Outcome"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("MRI Desc"); data = null; cell = null; /* cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Clinical Evaluation Time Point"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Followup 
Date"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Followup Month"); data = null; cell = null; /* cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Neuro Evaluation Date"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Steroid Dose Status"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Anti-Convulsant Status"); data = null; cell = null; /* cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Time Point"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Site"); data = null; cell = null; /*cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Dose Start Date"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = 
cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Dose Stop Date"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Fraction Dose"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Fraction Number"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Radiation Type"); data = null; cell = null; /*cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Time Point"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Agent ID"); data = null; cell = null; */ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Agent Name"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Course Count"); data = null; cell = null; /* 
cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Dose Start Date"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Dose Stop Date"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Study Source"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Chemo Protocol Number"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Time Point"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Procedure Title"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Tumor Histology"); data = null; cell = null; /* cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", 
"header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Date"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("Prior Therapy Surgery Outcome"); data = null; cell = null; // starting onstudy areas /* cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Time Point"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Site"); data = null; cell = null; /* cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Dose Start Date"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Dose Stop Date"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Neurosis Status"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", 
"header").addText("OnStudy Therapy Radiation Fraction Dose"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Fraction Number"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Radiation Type"); data = null; cell = null; /*cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Time Point"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Agent ID"); data = null; cell = null; */ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Agent Name"); data = null; cell = null; /* cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Regimen Number"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Course Count"); data = null; cell = null; /*cell = 
headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Dose Start Date"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Dose Stop Date"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Study Source"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Chemo Protocol Number"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Time Point"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Procedure Title"); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Indication "); data = null; cell = null; cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", 
"header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Histo Diagnosis "); data = null; cell = null; /*cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Date"); data = null; cell = null;*/ cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("OnStudy Therapy Surgery Outcome"); data = null; cell = null; //sb.append("<Tr><Td id=\"header\">SAMPLE</td><td id=\"header\">AGE at Dx (years)</td><td id=\"header\">GENDER</td><td id=\"header\">SURVIVAL (months)</td><td id=\"header\">DISEASE</td>"); Iterator si = samples.iterator(); if(si.hasNext()) { SampleResultset sampleResultset = (SampleResultset)si.next(); if(sampleResultset.getGeneExprSingleViewResultsContainer() != null) { cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("GeneExp"); data = null; cell = null; //sb.append("<Td id=\"header\">GeneExp</td>"); } if(sampleResultset.getCopyNumberSingleViewResultsContainer()!= null) { cell = headerRow.addElement("Cell").addAttribute("type", "header").addAttribute("class", "header").addAttribute("group", "header"); data = cell.addElement("Data").addAttribute("type", "header").addText("CopyNumber"); data = null; cell = null; //sb.append("<td id=\"header\">CopyNumber</td>"); } //sb.append("</tr>\n"); } for (Iterator sampleIterator = samples.iterator(); sampleIterator.hasNext();) { SampleResultset sampleResultset = (SampleResultset)sampleIterator.next(); dataRow = report.addElement("Row").addAttribute("name", "dataRow"); cell = 
dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "sample").addAttribute("group", "sample"); data = cell.addElement("Data").addAttribute("type", "data").addText(sampleResultset.getSampleIDDE().getValue().toString()); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getAgeGroup())); data = null; cell = null; String theGender = defaultV; if(!DEUtils.checkNV(sampleResultset.getGenderCode()).equalsIgnoreCase("O")) theGender = DEUtils.checkNV(sampleResultset.getGenderCode()); cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(theGender); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getSurvivalLengthRange())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getDisease())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getWhoGrade())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getRaceDE())); data = null; 
cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getKarnofskyClinicalEvalDE())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getLanskyClinicalEvalDE())); data = null; cell = null;*/ /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getNeuroExamClinicalEvalDE())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNV(sampleResultset.getMriClinicalEvalDE())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getNeuroExamDescs())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getMriScoreDescs())); data = null; cell = null; /* cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getTimePoints())); data = null; cell = null; cell = 
dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getFollowupDates())); data = null; cell = null; */ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getFollowupMonths())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getNeuroEvaluationDates())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getSteroidDoseStatuses())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getAntiConvulsantStatuses())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationTimePoints())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationRadiationSites())); data = null; cell = null; /* cell = 
dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationDoseStartDates())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationDoseStopDates())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationFractionDoses())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationFractionNumbers())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorRadiationRadiationTypes())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoTimePoints())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoagentIds())); data = null; cell = 
null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoAgentNames())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoCourseCounts())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoDoseStartDates())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoDoseStopDates())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoStudySources())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorChemoProtocolNumbers())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgeryTimePoints())); data = null; cell = null; */ 
cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgeryProcedureTitles())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgeryTumorHistologys())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgerySurgeryDates())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getPriorSurgerySurgeryOutcomes())); data = null; cell = null; // starting onstudy /* cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationTimePoints())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationRadiationSites())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", 
"data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationDoseStartDates())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationDoseStopDates())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationFractionDoses())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationFractionNumbers())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationNeurosisStatuses())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyRadiationRadiationTypes())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoTimePoints())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = 
cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoagentIds())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoAgentNames())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoRegimenNumbers())); data = null; cell = null;*/ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoCourseCounts())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoDoseStartDates())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoDoseStopDates())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoStudySources())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data 
= cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudyChemoProtocolNumbers())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgeryTimePoints())); data = null; cell = null; */ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgeryProcedureTitles())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgeryIndications())); data = null; cell = null; cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgeryHistoDiagnoses())); data = null; cell = null; /*cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgerySurgeryDates())); data = null; cell = null; */ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText(DEUtils.checkNull(sampleResultset.getOnStudySurgerySurgeryOutcomes())); data = null; cell = null; /* sb.append("<tr><td>"+sampleResultset.getBiospecimen().getValue().toString().substring(2)+ "</td>" + 
"<Td>"+sampleResultset.getAgeGroup().getValue()+ "</td>" + "<td>"+sampleResultset.getGenderCode().getValue()+ "</td>" + "<td>"+sampleResultset.getSurvivalLengthRange().getValue()+ "</td>" + "<Td>"+sampleResultset.getDisease().getValue() + "</td>"); */ if(sampleResultset.getGeneExprSingleViewResultsContainer() != null) { //TODO: create the links cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText("G"); data = null; cell = null; //sb.append("<td><a href=\"report.do?s="+sampleName+"_gene&report=gene\">G</a></td>"); } /* else if (gLinks){ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText("-"); data = null; cell = null; //sb.append("<td>&nbsp;</td>"); //empty cell } */ if(sampleResultset.getCopyNumberSingleViewResultsContainer()!= null) { // TODO: create the links cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText("C"); data = null; cell = null; //sb.append("<Td><a href=\"report.do?s="+sampleName +"_copy&report=copy\">C</a></td>"); } /* else if (cLinks){ cell = dataRow.addElement("Cell").addAttribute("type", "data").addAttribute("class", "data").addAttribute("group", "data"); data = cell.addElement("Data").addAttribute("type", "data").addText("-"); data = null; cell = null; //sb.append("<td>&nbsp;</td>"); //empty cell } */ //report.append("row", row); //sb.append("</tr>\n"); } //sb.append("</table>\n<br>"); //return sb.toString(); } catch(Exception e) { //asdf System.out.println(e); } return document; }
diff --git a/src/main/java/org/sikuli/slides/sikuli/SlideTutorial.java b/src/main/java/org/sikuli/slides/sikuli/SlideTutorial.java index e386714..5354c8b 100644 --- a/src/main/java/org/sikuli/slides/sikuli/SlideTutorial.java +++ b/src/main/java/org/sikuli/slides/sikuli/SlideTutorial.java @@ -1,116 +1,117 @@ /** @author Khalid Alharbi */ package org.sikuli.slides.sikuli; import java.awt.Color; import org.jnativehook.GlobalScreen; import org.jnativehook.NativeHookException; import org.sikuli.api.DesktopScreenRegion; import org.sikuli.api.ScreenRegion; import org.sikuli.api.visual.Canvas; import org.sikuli.api.visual.ScreenRegionCanvas; import org.sikuli.slides.listeners.GlobalKeyboardListeners; import org.sikuli.slides.listeners.GlobalMouseListeners; import org.sikuli.slides.shapes.SlideShape; import org.sikuli.slides.utils.Constants; import org.sikuli.slides.utils.UserPreferencesEditor; import org.sikuli.slides.utils.Constants.DesktopEvent; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class SlideTutorial { private static final Logger logger = (Logger) LoggerFactory.getLogger(SlideTutorial.class); private UserPreferencesEditor prefsEditor = new UserPreferencesEditor(); private ScreenRegion targetRegion; private Constants.DesktopEvent desktopEvent; private SlideShape slideShape; /** * A slide tutorial to be used in tutorial mode and help mode * @param targetRegion * @param slideShape * @param desktopEvent * @param observable UI observable used to observe tutorial mode navigation status. 
*/ public SlideTutorial(ScreenRegion targetRegion, SlideShape slideShape, Constants.DesktopEvent desktopEvent){ this.targetRegion=targetRegion; this.slideShape=slideShape; this.desktopEvent=desktopEvent; } private String getActionDisplayName(){ if(desktopEvent==DesktopEvent.LEFT_CLICK){ return "Click here"; } else if(desktopEvent==DesktopEvent.DOUBLE_CLICK){ return "Double click here"; } else if(desktopEvent==DesktopEvent.RIGHT_CLICK){ return "Right click here"; } else if(desktopEvent==DesktopEvent.DRAG_N_DROP){ if(slideShape.getOrder()==0) return "Drag this"; else if(slideShape.getOrder()==1) return "and drop it here"; } else if(desktopEvent==DesktopEvent.KEYBOARD_TYPING){ return "Click and type: "+ slideShape.getText()+" here"; } return ""; } public void performTutorialSlideAction() { if(targetRegion==null){ return; } Canvas canvas=new ScreenRegionCanvas(new DesktopScreenRegion(Constants.ScreenId)); canvas.addBox(targetRegion).withLineWidth(prefsEditor.getCanvasWidthSize()); int x=targetRegion.getBounds().x; int y=targetRegion.getBounds().y; int w=targetRegion.getBounds().width; int h=targetRegion.getBounds().height; ScreenRegion labelRegion=new DesktopScreenRegion(Constants.ScreenId,x,y-h,w,h); canvas.addLabel(labelRegion, getActionDisplayName()) .withColor(Color.black).withFontSize(prefsEditor.getInstructionHintFontSize());; logger.info("Waiting for the user to pefrom "+this.desktopEvent.toString()+" on the highlighted target."); try { if(!GlobalScreen.isNativeHookRegistered()){ GlobalScreen.registerNativeHook(); } if(this.desktopEvent==DesktopEvent.KEYBOARD_TYPING){ // add native keyboard listener GlobalKeyboardListeners globalKeyboardListener=new GlobalKeyboardListeners(targetRegion,slideShape.getText(),desktopEvent); GlobalScreen.getInstance().addNativeKeyListener(globalKeyboardListener); // display canvas around the target until the user performs the appropriate action canvas.displayWhile(globalKeyboardListener); // Remove native keyboard listener 
GlobalScreen.getInstance().removeNativeKeyListener(globalKeyboardListener); } else if(this.desktopEvent==DesktopEvent.LEFT_CLICK || this.desktopEvent==DesktopEvent.RIGHT_CLICK || this.desktopEvent==DesktopEvent.DOUBLE_CLICK || this.desktopEvent==DesktopEvent.DRAG_N_DROP){ // add native mouse listener GlobalMouseListeners globalMouseListener=new GlobalMouseListeners(targetRegion,desktopEvent); GlobalScreen.getInstance().addNativeMouseListener(globalMouseListener); // display canvas around the target until the user performs the appropriate action canvas.displayWhile(globalMouseListener); // Remove native mouse listener GlobalScreen.getInstance().removeNativeMouseListener(globalMouseListener); } else if(this.desktopEvent==DesktopEvent.LAUNCH_BROWSER){ //TODO: } else if(this.desktopEvent==DesktopEvent.EXIST||this.desktopEvent==DesktopEvent.NOT_EXIST){ //TODO: } } catch (NativeHookException ex) { - logger.error("There was a problem in running the tutorial mode."); + logger.error("There was a problem in running the tutorial mode. " + + ex.getMessage()); System.exit(1); } } }
true
true
public void performTutorialSlideAction() { if(targetRegion==null){ return; } Canvas canvas=new ScreenRegionCanvas(new DesktopScreenRegion(Constants.ScreenId)); canvas.addBox(targetRegion).withLineWidth(prefsEditor.getCanvasWidthSize()); int x=targetRegion.getBounds().x; int y=targetRegion.getBounds().y; int w=targetRegion.getBounds().width; int h=targetRegion.getBounds().height; ScreenRegion labelRegion=new DesktopScreenRegion(Constants.ScreenId,x,y-h,w,h); canvas.addLabel(labelRegion, getActionDisplayName()) .withColor(Color.black).withFontSize(prefsEditor.getInstructionHintFontSize());; logger.info("Waiting for the user to pefrom "+this.desktopEvent.toString()+" on the highlighted target."); try { if(!GlobalScreen.isNativeHookRegistered()){ GlobalScreen.registerNativeHook(); } if(this.desktopEvent==DesktopEvent.KEYBOARD_TYPING){ // add native keyboard listener GlobalKeyboardListeners globalKeyboardListener=new GlobalKeyboardListeners(targetRegion,slideShape.getText(),desktopEvent); GlobalScreen.getInstance().addNativeKeyListener(globalKeyboardListener); // display canvas around the target until the user performs the appropriate action canvas.displayWhile(globalKeyboardListener); // Remove native keyboard listener GlobalScreen.getInstance().removeNativeKeyListener(globalKeyboardListener); } else if(this.desktopEvent==DesktopEvent.LEFT_CLICK || this.desktopEvent==DesktopEvent.RIGHT_CLICK || this.desktopEvent==DesktopEvent.DOUBLE_CLICK || this.desktopEvent==DesktopEvent.DRAG_N_DROP){ // add native mouse listener GlobalMouseListeners globalMouseListener=new GlobalMouseListeners(targetRegion,desktopEvent); GlobalScreen.getInstance().addNativeMouseListener(globalMouseListener); // display canvas around the target until the user performs the appropriate action canvas.displayWhile(globalMouseListener); // Remove native mouse listener GlobalScreen.getInstance().removeNativeMouseListener(globalMouseListener); } else if(this.desktopEvent==DesktopEvent.LAUNCH_BROWSER){ 
//TODO: } else if(this.desktopEvent==DesktopEvent.EXIST||this.desktopEvent==DesktopEvent.NOT_EXIST){ //TODO: } } catch (NativeHookException ex) { logger.error("There was a problem in running the tutorial mode."); System.exit(1); } }
public void performTutorialSlideAction() { if(targetRegion==null){ return; } Canvas canvas=new ScreenRegionCanvas(new DesktopScreenRegion(Constants.ScreenId)); canvas.addBox(targetRegion).withLineWidth(prefsEditor.getCanvasWidthSize()); int x=targetRegion.getBounds().x; int y=targetRegion.getBounds().y; int w=targetRegion.getBounds().width; int h=targetRegion.getBounds().height; ScreenRegion labelRegion=new DesktopScreenRegion(Constants.ScreenId,x,y-h,w,h); canvas.addLabel(labelRegion, getActionDisplayName()) .withColor(Color.black).withFontSize(prefsEditor.getInstructionHintFontSize());; logger.info("Waiting for the user to pefrom "+this.desktopEvent.toString()+" on the highlighted target."); try { if(!GlobalScreen.isNativeHookRegistered()){ GlobalScreen.registerNativeHook(); } if(this.desktopEvent==DesktopEvent.KEYBOARD_TYPING){ // add native keyboard listener GlobalKeyboardListeners globalKeyboardListener=new GlobalKeyboardListeners(targetRegion,slideShape.getText(),desktopEvent); GlobalScreen.getInstance().addNativeKeyListener(globalKeyboardListener); // display canvas around the target until the user performs the appropriate action canvas.displayWhile(globalKeyboardListener); // Remove native keyboard listener GlobalScreen.getInstance().removeNativeKeyListener(globalKeyboardListener); } else if(this.desktopEvent==DesktopEvent.LEFT_CLICK || this.desktopEvent==DesktopEvent.RIGHT_CLICK || this.desktopEvent==DesktopEvent.DOUBLE_CLICK || this.desktopEvent==DesktopEvent.DRAG_N_DROP){ // add native mouse listener GlobalMouseListeners globalMouseListener=new GlobalMouseListeners(targetRegion,desktopEvent); GlobalScreen.getInstance().addNativeMouseListener(globalMouseListener); // display canvas around the target until the user performs the appropriate action canvas.displayWhile(globalMouseListener); // Remove native mouse listener GlobalScreen.getInstance().removeNativeMouseListener(globalMouseListener); } else if(this.desktopEvent==DesktopEvent.LAUNCH_BROWSER){ 
//TODO: } else if(this.desktopEvent==DesktopEvent.EXIST||this.desktopEvent==DesktopEvent.NOT_EXIST){ //TODO: } } catch (NativeHookException ex) { logger.error("There was a problem in running the tutorial mode. " + ex.getMessage()); System.exit(1); } }
diff --git a/src/edu/umich/sbolt/language/AgentMessageParser.java b/src/edu/umich/sbolt/language/AgentMessageParser.java index 05534f2..fb8a8ce 100644 --- a/src/edu/umich/sbolt/language/AgentMessageParser.java +++ b/src/edu/umich/sbolt/language/AgentMessageParser.java @@ -1,265 +1,269 @@ package edu.umich.sbolt.language; import java.util.*; import edu.umich.sbolt.language.Patterns.LingObject; import edu.umich.sbolt.world.WMUtil; import sml.Identifier; public class AgentMessageParser { public static String translateAgentMessage(Identifier id){ String message = null; String type = WMUtil.getValueOfAttribute(id, "type"); System.out.println(type); Identifier fieldsId = WMUtil.getIdentifierOfAttribute(id, "fields"); if(type == null){ return null; } else if(type.equals("different-attribute-question")){ message = translateDifferentAttributeQuestion(fieldsId); } else if(type.equals("value-question")){ message = translateValueQuestion(fieldsId); } else if(type.equals("common-attribute-question")){ message = translateCommonAttributeQuestion(fieldsId); } else if(type.equals("attribute-presence-question")){ message = translateAttributePresenceQuestion(fieldsId); } else if(type.equals("category-of-word")){ message = translateCategoryQuestion(fieldsId); } else if(type.equals("category-of-property")){ message = translateCategoryPropertyQuestion(fieldsId); } else if(type.equals("how-to-measure")){ message = String.format("How do I measure %s?", WMUtil.getValueOfAttribute(fieldsId, "property")); } else if(type.equals("ambiguous-category")){ message = translateAmbiguousCategory(fieldsId); } else if(type.equals("describe-object")){ message = translateDescription(fieldsId); } else if(type.equals("dont-know")){ message = "I don't know"; } else if(type.equals("no-prep")){ message = "I don't know that preposition."; } else if(type.equals("single-word-response")){ message = WMUtil.getValueOfAttribute(fieldsId, "word"); } else if(type.equals("no-object")){ message = "I do not see the 
object you are talking about"; } else if(type.equals("count-response")){ int count = Integer.parseInt(WMUtil.getValueOfAttribute(fieldsId, "count")); message = "There " + (count == 1 ? "is" : "are") + " " + count; } else if(type.equals("unknown-message")){ message = "I was not able to understand your last message"; } else if(type.equals("teaching-request")){ message = translateTeachingRequest(fieldsId); } else if(type.equals("which-question")){ message = translateWhichQuestion(fieldsId); } else if(type.equals("get-next-task")){ message = "Waiting for next command..."; } else if(type.equals("get-next-subaction")){ message = "What action should I take next?"; } else if(type.equals("confirmation")){ message = "Okay."; } else if (type.equals("get-goal")){ message = "What is the goal of the action?"; } else if (type.equals("restart-task-instruction")){ message = "The provided instruction sequence does not lead to the provided goal. Please give the instructions again."; } else if(type.equals("request-index-confirmation")){ message = translateRequestIndexConfirmation(fieldsId); } else if(type.equals("describe-scene")){ message = translateSceneQuestion(fieldsId); } else if(type.equals("describe-scene-objects")){ message = translateSceneObjectsQuestion(fieldsId); } else if(type.equals("list-objects")){ message = translateObjectsQuestion(fieldsId); } else if(type.equals("location-unknown")){ message = "Relative location of object unknown"; + } else if(type.equals("play-game")){ + message = "Shall we play a game?"; + } else if(type.equals("game-start")){ + message = "Ok I know that game. Tell me \"your turn\" when it's my turn."; } return message; } private static String translateTeachingRequest(Identifier id){ LingObject obj = LingObject.createFromSoarSpeak(id, "description"); //JK different syntax for prepositions if (obj.toString().contains("preposition")) return "I don't know the " + obj.toString() + ". 
Please teach me with examples"; else return "I don't see " + obj.toString() + ". Please teach me to recognize one"; } private static String translateDifferentAttributeQuestion(Identifier id){ Set<String> exceptions = WMUtil.getAllValuesOfAttribute(id, "exception"); String exceptionStr = getExceptionString(exceptions); LingObject differentObject = LingObject.createFromSoarSpeak(id, "different-object"); Set<LingObject> similarObjects = LingObject.createAllFromSoarSpeak(id, "similar-object"); String message = String.format("How does %s differ from ", differentObject.toString()); for(LingObject obj : similarObjects){ message += obj.toString() + "; "; } return exceptionStr + message; } private static String translateCommonAttributeQuestion(Identifier id){ Set<String> exceptions = WMUtil.getAllValuesOfAttribute(id, "exception"); String exceptionStr = getExceptionString(exceptions); Set<LingObject> similarObjects = LingObject.createAllFromSoarSpeak(id, "object"); String message = "What do "; for(LingObject obj : similarObjects){ message += obj.toString() + "; "; } return exceptionStr + message + " have in common?"; } private static String translateAttributePresenceQuestion(Identifier id){ Set<String> exceptions = WMUtil.getAllValuesOfAttribute(id, "exception"); String exceptionStr = getExceptionString(exceptions); LingObject object = LingObject.createFromSoarSpeak(id, "object"); String message = String.format("What attribute does %s have?", object.toString()); return exceptionStr + message; } private static String translateCategoryQuestion(Identifier id){ String word = WMUtil.getValueOfAttribute(id, "word"); return String.format("What kind of attribute is %s?", word); } private static String translateCategoryPropertyQuestion(Identifier id){ String word = WMUtil.getValueOfAttribute(id, "word"); return String.format("What type of property is %s?", word); } private static String translateAmbiguousCategory(Identifier id){ Set<String> cats = WMUtil.getAllValuesOfAttribute(id, 
"result"); String word = WMUtil.getValueOfAttribute(id, "word"); String s = "By " + word + " do you mean "; int i = 0; for(String cat : cats){ if((++i) == cats.size()){ s += "or " + cat + "?"; } else { s += cat + ", "; } } return s; } private static String translateSceneObjectsQuestion(Identifier id){ Identifier objects = WMUtil.getIdentifierOfAttribute(id, "objects"); Set<LingObject> object = LingObject.createAllFromSoarSpeak(objects, "object"); String message = "The objects in the scene are"; Iterator<LingObject> it = object.iterator(); if (object.isEmpty()) return "There are no objects in the scene."; while(it.hasNext()) { String obj = it.next().toString(); if (!it.hasNext() && object.size() > 1) message+= " and"; if (obj.startsWith(" a") || obj.startsWith(" e") || obj.startsWith(" i") || obj.startsWith(" o") || obj.startsWith(" u")) { message += " an"; } else { message += " a"; } message += obj; if (it.hasNext() && object.size() > 2) message+= ","; } return message; } private static String translateObjectsQuestion(Identifier id){ Identifier objects = WMUtil.getIdentifierOfAttribute(id, "objects"); Set<LingObject> object = LingObject.createAllFromSoarSpeak(objects, "object"); String message = ""; Iterator<LingObject> it = object.iterator(); if (object.isEmpty()) return "Nothing."; while(it.hasNext()) { String obj = it.next().toString(); if (!it.hasNext() && object.size() > 1) message+= " and"; if (obj.startsWith(" a") || obj.startsWith(" e") || obj.startsWith(" i") || obj.startsWith(" o") || obj.startsWith(" u")) { message += " an"; } else { message += " a"; } message += obj; if (it.hasNext() && object.size() > 2) message+= ","; } return message; } private static String translateSceneQuestion(Identifier id){ String prep = WMUtil.getValueOfAttribute(id, "prep"); String prep2 = prep.replaceAll("-", " "); String object1 = LingObject.createFromSoarSpeak(id, "object1").toString(); String object2 = LingObject.createFromSoarSpeak(id, "object2").toString(); return "The" 
+ object1 + " is " + prep2 + " the" + object2; } private static String translateValueQuestion(Identifier id){ Identifier attRelationId = WMUtil.getIdentifierOfAttribute(id, "attribute-relation"); String objString = LingObject.createFromSoarSpeak(attRelationId, "object1").toString(); String attribute = WMUtil.getValueOfAttribute(attRelationId, "word"); return String.format("What %s is %s?", attribute, objString); } private static String getExceptionString(Set<String> exceptions){ String exceptionStr = ""; if (exceptions.size() > 0) { exceptionStr = "Other than "; for(String exception : exceptions){ exceptionStr += exception + ", "; } exceptionStr = exceptionStr.substring(0, exceptionStr.length() - 2); exceptionStr += "; "; } return exceptionStr; } private static String translateDescription(Identifier id){ if(id == null){ return null; } //kind of a hack :( Identifier objectId = WMUtil.getIdentifierOfAttribute(id, "object"); if (objectId == null) return "nothing"; // CK: choose a/an correctly String ret = LingObject.createFromSoarSpeak(id, "object").toString(); if(ret.matches("^ [aeiouAEIOU].*")) { ret = "An"+ret; } else { ret = "A"+ret; } return ret; } private static String translateWhichQuestion(Identifier id){ Identifier objectId = WMUtil.getIdentifierOfAttribute(id, "description"); if (objectId == null) return "Which one?"; return "Which " + LingObject.createFromSoarSpeak(id, "description") + "?"; } private static String translateRequestIndexConfirmation(Identifier id){ LingObject obj = LingObject.createFromSoarSpeak(id, "object"); return "Is this " + obj.toString() + "?"; } }
true
true
public static String translateAgentMessage(Identifier id){ String message = null; String type = WMUtil.getValueOfAttribute(id, "type"); System.out.println(type); Identifier fieldsId = WMUtil.getIdentifierOfAttribute(id, "fields"); if(type == null){ return null; } else if(type.equals("different-attribute-question")){ message = translateDifferentAttributeQuestion(fieldsId); } else if(type.equals("value-question")){ message = translateValueQuestion(fieldsId); } else if(type.equals("common-attribute-question")){ message = translateCommonAttributeQuestion(fieldsId); } else if(type.equals("attribute-presence-question")){ message = translateAttributePresenceQuestion(fieldsId); } else if(type.equals("category-of-word")){ message = translateCategoryQuestion(fieldsId); } else if(type.equals("category-of-property")){ message = translateCategoryPropertyQuestion(fieldsId); } else if(type.equals("how-to-measure")){ message = String.format("How do I measure %s?", WMUtil.getValueOfAttribute(fieldsId, "property")); } else if(type.equals("ambiguous-category")){ message = translateAmbiguousCategory(fieldsId); } else if(type.equals("describe-object")){ message = translateDescription(fieldsId); } else if(type.equals("dont-know")){ message = "I don't know"; } else if(type.equals("no-prep")){ message = "I don't know that preposition."; } else if(type.equals("single-word-response")){ message = WMUtil.getValueOfAttribute(fieldsId, "word"); } else if(type.equals("no-object")){ message = "I do not see the object you are talking about"; } else if(type.equals("count-response")){ int count = Integer.parseInt(WMUtil.getValueOfAttribute(fieldsId, "count")); message = "There " + (count == 1 ? 
"is" : "are") + " " + count; } else if(type.equals("unknown-message")){ message = "I was not able to understand your last message"; } else if(type.equals("teaching-request")){ message = translateTeachingRequest(fieldsId); } else if(type.equals("which-question")){ message = translateWhichQuestion(fieldsId); } else if(type.equals("get-next-task")){ message = "Waiting for next command..."; } else if(type.equals("get-next-subaction")){ message = "What action should I take next?"; } else if(type.equals("confirmation")){ message = "Okay."; } else if (type.equals("get-goal")){ message = "What is the goal of the action?"; } else if (type.equals("restart-task-instruction")){ message = "The provided instruction sequence does not lead to the provided goal. Please give the instructions again."; } else if(type.equals("request-index-confirmation")){ message = translateRequestIndexConfirmation(fieldsId); } else if(type.equals("describe-scene")){ message = translateSceneQuestion(fieldsId); } else if(type.equals("describe-scene-objects")){ message = translateSceneObjectsQuestion(fieldsId); } else if(type.equals("list-objects")){ message = translateObjectsQuestion(fieldsId); } else if(type.equals("location-unknown")){ message = "Relative location of object unknown"; } return message; }
public static String translateAgentMessage(Identifier id){ String message = null; String type = WMUtil.getValueOfAttribute(id, "type"); System.out.println(type); Identifier fieldsId = WMUtil.getIdentifierOfAttribute(id, "fields"); if(type == null){ return null; } else if(type.equals("different-attribute-question")){ message = translateDifferentAttributeQuestion(fieldsId); } else if(type.equals("value-question")){ message = translateValueQuestion(fieldsId); } else if(type.equals("common-attribute-question")){ message = translateCommonAttributeQuestion(fieldsId); } else if(type.equals("attribute-presence-question")){ message = translateAttributePresenceQuestion(fieldsId); } else if(type.equals("category-of-word")){ message = translateCategoryQuestion(fieldsId); } else if(type.equals("category-of-property")){ message = translateCategoryPropertyQuestion(fieldsId); } else if(type.equals("how-to-measure")){ message = String.format("How do I measure %s?", WMUtil.getValueOfAttribute(fieldsId, "property")); } else if(type.equals("ambiguous-category")){ message = translateAmbiguousCategory(fieldsId); } else if(type.equals("describe-object")){ message = translateDescription(fieldsId); } else if(type.equals("dont-know")){ message = "I don't know"; } else if(type.equals("no-prep")){ message = "I don't know that preposition."; } else if(type.equals("single-word-response")){ message = WMUtil.getValueOfAttribute(fieldsId, "word"); } else if(type.equals("no-object")){ message = "I do not see the object you are talking about"; } else if(type.equals("count-response")){ int count = Integer.parseInt(WMUtil.getValueOfAttribute(fieldsId, "count")); message = "There " + (count == 1 ? 
"is" : "are") + " " + count; } else if(type.equals("unknown-message")){ message = "I was not able to understand your last message"; } else if(type.equals("teaching-request")){ message = translateTeachingRequest(fieldsId); } else if(type.equals("which-question")){ message = translateWhichQuestion(fieldsId); } else if(type.equals("get-next-task")){ message = "Waiting for next command..."; } else if(type.equals("get-next-subaction")){ message = "What action should I take next?"; } else if(type.equals("confirmation")){ message = "Okay."; } else if (type.equals("get-goal")){ message = "What is the goal of the action?"; } else if (type.equals("restart-task-instruction")){ message = "The provided instruction sequence does not lead to the provided goal. Please give the instructions again."; } else if(type.equals("request-index-confirmation")){ message = translateRequestIndexConfirmation(fieldsId); } else if(type.equals("describe-scene")){ message = translateSceneQuestion(fieldsId); } else if(type.equals("describe-scene-objects")){ message = translateSceneObjectsQuestion(fieldsId); } else if(type.equals("list-objects")){ message = translateObjectsQuestion(fieldsId); } else if(type.equals("location-unknown")){ message = "Relative location of object unknown"; } else if(type.equals("play-game")){ message = "Shall we play a game?"; } else if(type.equals("game-start")){ message = "Ok I know that game. Tell me \"your turn\" when it's my turn."; } return message; }
diff --git a/src/com/tomclaw/mandarin/im/icq/IcqMessageRequest.java b/src/com/tomclaw/mandarin/im/icq/IcqMessageRequest.java index 12f63dab..4f3037da 100644 --- a/src/com/tomclaw/mandarin/im/icq/IcqMessageRequest.java +++ b/src/com/tomclaw/mandarin/im/icq/IcqMessageRequest.java @@ -1,75 +1,75 @@ package com.tomclaw.mandarin.im.icq; import android.util.Pair; import com.tomclaw.mandarin.core.QueryHelper; import org.json.JSONException; import org.json.JSONObject; import java.util.ArrayList; import java.util.List; import static com.tomclaw.mandarin.im.icq.WimConstants.*; /** * Created with IntelliJ IDEA. * User: solkin * Date: 6/12/13 * Time: 1:38 PM */ public class IcqMessageRequest extends WimRequest { private String to; private String message; private String cookie; public IcqMessageRequest() { } public IcqMessageRequest(String to, String message, String cookie) { this.to = to; this.message = message; this.cookie = cookie; } @Override public int parseResponse(JSONObject response) throws JSONException { JSONObject responseObject = response.getJSONObject(RESPONSE_OBJECT); int statusCode = responseObject.getInt(STATUS_CODE); // Check for server reply. if (statusCode == WIM_OK) { + String requestId = responseObject.getString(REQUEST_ID); JSONObject dataObject = responseObject.getJSONObject(DATA_OBJECT); - String requestId = dataObject.getString(REQUEST_ID); String state = dataObject.getString(STATE); // Checking for message state. for (int i = 0; i < IM_STATES.length; i++) { if (state.equals(IM_STATES[i])) { QueryHelper.updateMessage(getAccountRoot().getContentResolver(), requestId, i); break; } } return REQUEST_DELETE; } // Maybe incorrect aim sid or McDonald's. 
return REQUEST_PENDING; } @Override public String getUrl() { return getAccountRoot().getWellKnownUrls().getWebApiBase() .concat("im/sendIM"); } @Override public List<Pair<String, String>> getParams() { List<Pair<String, String>> params = new ArrayList<Pair<String, String>>(); params.add(new Pair<String, String>("aimsid", getAccountRoot().getAimSid())); params.add(new Pair<String, String>("autoResponse", "false")); params.add(new Pair<String, String>("f", "json")); params.add(new Pair<String, String>("message", message)); params.add(new Pair<String, String>("notifyDelivery", "true")); params.add(new Pair<String, String>("offlineIM", "true")); params.add(new Pair<String, String>("r", cookie)); params.add(new Pair<String, String>("t", to)); return params; } }
false
true
public int parseResponse(JSONObject response) throws JSONException { JSONObject responseObject = response.getJSONObject(RESPONSE_OBJECT); int statusCode = responseObject.getInt(STATUS_CODE); // Check for server reply. if (statusCode == WIM_OK) { JSONObject dataObject = responseObject.getJSONObject(DATA_OBJECT); String requestId = dataObject.getString(REQUEST_ID); String state = dataObject.getString(STATE); // Checking for message state. for (int i = 0; i < IM_STATES.length; i++) { if (state.equals(IM_STATES[i])) { QueryHelper.updateMessage(getAccountRoot().getContentResolver(), requestId, i); break; } } return REQUEST_DELETE; } // Maybe incorrect aim sid or McDonald's. return REQUEST_PENDING; }
public int parseResponse(JSONObject response) throws JSONException { JSONObject responseObject = response.getJSONObject(RESPONSE_OBJECT); int statusCode = responseObject.getInt(STATUS_CODE); // Check for server reply. if (statusCode == WIM_OK) { String requestId = responseObject.getString(REQUEST_ID); JSONObject dataObject = responseObject.getJSONObject(DATA_OBJECT); String state = dataObject.getString(STATE); // Checking for message state. for (int i = 0; i < IM_STATES.length; i++) { if (state.equals(IM_STATES[i])) { QueryHelper.updateMessage(getAccountRoot().getContentResolver(), requestId, i); break; } } return REQUEST_DELETE; } // Maybe incorrect aim sid or McDonald's. return REQUEST_PENDING; }
diff --git a/src/net/m_kawato/tabletpos/OrderInputHelper.java b/src/net/m_kawato/tabletpos/OrderInputHelper.java index 18c9079..6efcf5b 100644 --- a/src/net/m_kawato/tabletpos/OrderInputHelper.java +++ b/src/net/m_kawato/tabletpos/OrderInputHelper.java @@ -1,79 +1,81 @@ package net.m_kawato.tabletpos; import java.util.List; import android.app.Activity; import android.util.Log; import android.view.View; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.Spinner; public class OrderInputHelper implements AdapterView.OnItemSelectedListener { public static final String TAG = "OrderInputHelper"; private Activity activity; private Globals globals; public OrderInputHelper(Activity activity, Globals globals) { this.activity = activity; this.globals = globals; } // Event handler for spinners @Override public void onItemSelected(AdapterView<?> parent, View view, int position, long id) { Log.d(TAG, String.format("onItemSelected: id=%x, position=%d", parent.getId(), position)); switch (parent.getId()) { case R.id.spn_route: - globals.selectedRoute = position; - globals.selectedPlace = 0; - updatePlaceSelector(); + if (globals.selectedRoute != position) { + globals.selectedRoute = position; + globals.selectedPlace = 0; + updatePlaceSelector(); + } break; case R.id.spn_place: globals.selectedPlace = position; break; } } @Override public void onNothingSelected(AdapterView<?> parent) { Log.d(TAG, "onNothingSelected"); } // Build Spinner for route selection public void buildRouteSelector() { Log.d(TAG, "buildRouteSelector"); ArrayAdapter<String> routeAdapter = new ArrayAdapter<String>(activity, android.R.layout.simple_spinner_item); routeAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); for (String routeCode: globals.routes) { String routeName = globals.routeName.get(routeCode); routeAdapter.add(String.format("%s (%s)", routeName, routeCode)); } Spinner spnRoute = (Spinner) activity.findViewById(R.id.spn_route); 
spnRoute.setAdapter(routeAdapter); spnRoute.setSelection(globals.selectedRoute); spnRoute.setOnItemSelectedListener(this); } // Build Spinner for place selection public void buildPlaceSelector() { Log.d(TAG, "buildPlaceSelector"); Spinner spnPlace = (Spinner) activity.findViewById(R.id.spn_place); spnPlace.setOnItemSelectedListener(this); updatePlaceSelector(); } // Update list of Spinner for place selection private void updatePlaceSelector() { Log.d(TAG, "updatePlaceSelector"); Spinner spnPlace = (Spinner) activity.findViewById(R.id.spn_place); ArrayAdapter<String> placeAdapter = new ArrayAdapter<String>(activity, android.R.layout.simple_spinner_item); placeAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); List<String> places = globals.places.get(globals.getSelectedRouteCode()); for (String placeCode: places) { String placeName = globals.placeName.get(placeCode); placeAdapter.add(String.format("%s (%s)", placeName, placeCode)); } spnPlace.setAdapter(placeAdapter); spnPlace.setSelection(globals.selectedPlace); } }
true
true
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) { Log.d(TAG, String.format("onItemSelected: id=%x, position=%d", parent.getId(), position)); switch (parent.getId()) { case R.id.spn_route: globals.selectedRoute = position; globals.selectedPlace = 0; updatePlaceSelector(); break; case R.id.spn_place: globals.selectedPlace = position; break; } }
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) { Log.d(TAG, String.format("onItemSelected: id=%x, position=%d", parent.getId(), position)); switch (parent.getId()) { case R.id.spn_route: if (globals.selectedRoute != position) { globals.selectedRoute = position; globals.selectedPlace = 0; updatePlaceSelector(); } break; case R.id.spn_place: globals.selectedPlace = position; break; } }
diff --git a/sip-servlets-impl/src/main/java/org/mobicents/servlet/sip/proxy/ProxyUtils.java b/sip-servlets-impl/src/main/java/org/mobicents/servlet/sip/proxy/ProxyUtils.java index 398ad2f87..2038e49c4 100644 --- a/sip-servlets-impl/src/main/java/org/mobicents/servlet/sip/proxy/ProxyUtils.java +++ b/sip-servlets-impl/src/main/java/org/mobicents/servlet/sip/proxy/ProxyUtils.java @@ -1,373 +1,377 @@ /* * JBoss, Home of Professional Open Source * Copyright 2011, Red Hat, Inc. and individual contributors * by the @authors tag. See the copyright.txt in the distribution for a * full listing of individual contributors. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. 
*/ package org.mobicents.servlet.sip.proxy; import gov.nist.javax.sip.header.HeaderFactoryExt; import gov.nist.javax.sip.header.ims.PathHeader; import gov.nist.javax.sip.message.MessageExt; import gov.nist.javax.sip.stack.SIPTransaction; import java.util.Iterator; import javax.servlet.sip.SipURI; import javax.servlet.sip.URI; import javax.sip.ListeningPoint; import javax.sip.Transaction; import javax.sip.TransactionState; import javax.sip.address.Address; import javax.sip.header.Header; import javax.sip.header.MaxForwardsHeader; import javax.sip.header.RecordRouteHeader; import javax.sip.header.ViaHeader; import javax.sip.message.Request; import javax.sip.message.Response; import org.apache.log4j.Logger; import org.mobicents.servlet.sip.JainSipUtils; import org.mobicents.servlet.sip.SipConnector; import org.mobicents.servlet.sip.SipFactories; import org.mobicents.servlet.sip.address.SipURIImpl; import org.mobicents.servlet.sip.address.URIImpl; import org.mobicents.servlet.sip.core.dispatchers.MessageDispatcher; import org.mobicents.servlet.sip.core.session.SipApplicationSessionKey; import org.mobicents.servlet.sip.message.SipFactoryImpl; import org.mobicents.servlet.sip.message.SipServletRequestImpl; import org.mobicents.servlet.sip.message.SipServletResponseImpl; import org.mobicents.servlet.sip.startup.StaticServiceHolder; /** * TODO: Use outbound interface from ProxyParams.outboundInterface when adding local * listening point addresses. 
* */ public class ProxyUtils { private static final Logger logger = Logger.getLogger(ProxyUtils.class); public static Request createProxiedRequest(SipServletRequestImpl originalRequest, ProxyBranchImpl proxyBranch, URI destination, SipURI outboundInterface, SipURI routeRecord, SipURI path) { try { final Request clonedRequest = (Request) originalRequest.getMessage().clone(); final String method = clonedRequest.getMethod(); final ProxyImpl proxy = (ProxyImpl) proxyBranch.getProxy(); final SipFactoryImpl sipFactoryImpl = proxy.getSipFactoryImpl(); ((MessageExt)clonedRequest).setApplicationData(null); String outboundTransport = JainSipUtils.findTransport(clonedRequest); if(proxy.getOutboundInterface() != null) { outboundTransport = proxy.getOutboundInterface().getTransportParam(); if(outboundTransport == null) { if(proxy.getOutboundInterface().isSecure()) { outboundTransport = ListeningPoint.TCP; } else { outboundTransport = ListeningPoint.UDP; } } } if(outboundTransport == null) outboundTransport = originalRequest.getSipSession().getTransport(); if(outboundTransport == null) outboundTransport = ListeningPoint.UDP; // The target is null when proxying subsequent requests (the Route header is already there) if(destination != null) { if(logger.isDebugEnabled()){ logger.debug("request URI on the request to proxy : " + destination); } // only set the request URI if the request has no Route headers // see RFC3261 16.12 // see http://code.google.com/p/mobicents/issues/detail?id=1847 Header route = clonedRequest.getHeader("Route"); if(route == null || // it was decided that initial requests // should have their RURI changed to pass TCK testGetAddToPath001 originalRequest.isInitial()) { if(logger.isDebugEnabled()){ logger.debug("setting request uri as cloned request has no Route headers: " + destination); } //this way everything is copied even the port but might not work for TelURI... 
clonedRequest.setRequestURI(((URIImpl)destination).getURI()); } else { if(logger.isDebugEnabled()){ logger.debug("NOT setting request uri as cloned request has at least one Route header: " + route); } } // // Add route header // javax.sip.address.SipURI routeUri = SipFactories.addressFactory.createSipURI( // params.destination.getUser(), params.destination.getHost()); // routeUri.setPort(params.destination.getPort()); // routeUri.setLrParam(); // javax.sip.address.Address address = SipFactories.addressFactory.createAddress(params.destination.getUser(), // routeUri); // RouteHeader rheader = SipFactories.headerFactory.createRouteHeader(address); // // clonedRequest.setHeader(rheader); } else { // CANCELs are hop-by-hop, so here must remove any existing Via // headers, // Record-Route headers. We insert Via header below so we will // get response. if (method.equals(Request.CANCEL)) { clonedRequest.removeHeader(ViaHeader.NAME); clonedRequest.removeHeader(RecordRouteHeader.NAME); } SipConnector sipConnector = StaticServiceHolder.sipStandardService.findSipConnector(outboundTransport); if(sipConnector != null && sipConnector.isUseStaticAddress()) { // This is needed because otherwise we have the IP LB address here. If there is no route header // this means the request will go to the IP LB. For outbound requests we must bypass the IP LB. 
// http://code.google.com/p/mobicents/issues/detail?id=2210 //clonedRequest.setRequestURI(((URIImpl)(proxyBranch).getTargetURI()).getURI()); javax.sip.address.URI uri = clonedRequest.getRequestURI(); if(uri.isSipURI()) { javax.sip.address.SipURI sipUri = (javax.sip.address.SipURI) uri; JainSipUtils.optimizeUriForInternalRoutingRequest(sipConnector, sipUri, originalRequest.getSipSession(), sipFactoryImpl, outboundTransport); } if(logger.isDebugEnabled()){ logger.debug("setting request uri as cloned request has no Route headers and is using static address: " + destination); } } } // Decrease max forwards if available MaxForwardsHeader mf = (MaxForwardsHeader) clonedRequest .getHeader(MaxForwardsHeader.NAME); if (mf == null) { mf = SipFactories.headerFactory.createMaxForwardsHeader(70); clonedRequest.addHeader(mf); } else { mf.setMaxForwards(mf.getMaxForwards() - 1); } if (method.equals(Request.CANCEL)) { // Cancel is hop by hop so remove all other via headers. clonedRequest.removeHeader(ViaHeader.NAME); } final SipApplicationSessionKey sipAppKey = originalRequest.getSipSession().getSipApplicationSession().getKey(); final String appName = sipFactoryImpl.getSipApplicationDispatcher().getHashFromApplicationName(sipAppKey.getApplicationName()); final SipServletRequestImpl proxyBranchRequest = (SipServletRequestImpl) proxyBranch.getRequest(); //Add via header ViaHeader viaHeader = null;//proxyBranch.viaHeader; if(viaHeader == null) { if(proxy.getOutboundInterface() == null) { String branchId = null; // http://code.google.com/p/mobicents/issues/detail?id=2359 // ivan dubrov : TERMINATED state checking to avoid reusing the branchid for ACK to 200 if(Request.ACK.equals(method) && proxyBranchRequest != null && proxyBranchRequest.getTransaction() != null && proxyBranchRequest.getTransaction().getState() != TransactionState.TERMINATED) { branchId = proxyBranchRequest.getTransaction().getBranchId(); logger.debug("reusing original branch id " + branchId); + } else { + branchId = 
JainSipUtils.createBranch(sipAppKey.getId(), appName); } // Issue viaHeader = JainSipUtils.createViaHeader( sipFactoryImpl.getSipNetworkInterfaceManager(), clonedRequest, branchId, null); } else { //If outbound interface is specified use it String branchId = null; // http://code.google.com/p/mobicents/issues/detail?id=2359 // ivan dubrov : TERMINATED state checking to avoid reusing the branchid for ACK to 200 if(Request.ACK.equals(method) && proxyBranchRequest != null && proxyBranchRequest.getTransaction() != null && proxyBranchRequest.getTransaction().getState() != TransactionState.TERMINATED) { branchId = proxyBranchRequest.getTransaction().getBranchId(); logger.debug("reusing original branch id " + branchId); + } else { + branchId = JainSipUtils.createBranch(sipAppKey.getId(), appName); } viaHeader = SipFactories.headerFactory.createViaHeader( proxy.getOutboundInterface().getHost(), proxy.getOutboundInterface().getPort(), outboundTransport, branchId); } proxyBranch.viaHeader = viaHeader; } // else { // String branchId = null; // viaHeader = (ViaHeader) viaHeader.clone(); // // http://code.google.com/p/mobicents/issues/detail?id=2359 // // ivan dubrov : TERMINATED state checking to avoid reusing the branchid for ACK to 200 // if(Request.ACK.equals(method) && proxyBranchRequest != null && proxyBranchRequest.getTransaction() != null // && proxyBranchRequest.getTransaction().getState() != TransactionState.TERMINATED) { // branchId = proxyBranchRequest.getTransaction().getBranchId(); // logger.debug("reusing original branch id " + branchId); // } else { // branchId = JainSipUtils.createBranch(sipAppKey.getId(), appName); // } // viaHeader.setBranch(branchId); // } clonedRequest.addHeader(viaHeader); //Add route-record header, if enabled and if needed (if not null) if(routeRecord != null && !Request.REGISTER.equalsIgnoreCase(method)) { javax.sip.address.SipURI rrURI = null; if(proxy.getOutboundInterface() == null) { rrURI = 
JainSipUtils.createRecordRouteURI(sipFactoryImpl.getSipNetworkInterfaceManager(), clonedRequest); } else { rrURI = ((SipURIImpl) proxy.getOutboundInterface()).getSipURI(); } if(originalRequest.getTransport() != null) rrURI.setTransportParam(originalRequest.getTransport()); final Iterator<String> paramNames = routeRecord.getParameterNames(); // Copy the parameters set by the user while(paramNames.hasNext()) { String paramName = paramNames.next(); rrURI.setParameter(paramName, routeRecord.getParameter(paramName)); } rrURI.setParameter(MessageDispatcher.RR_PARAM_APPLICATION_NAME, appName); rrURI.setParameter(MessageDispatcher.RR_PARAM_PROXY_APP, "true"); rrURI.setParameter(MessageDispatcher.APP_ID, sipAppKey.getId()); rrURI.setLrParam(); final Address rraddress = SipFactories.addressFactory .createAddress(null, rrURI); final RecordRouteHeader recordRouteHeader = SipFactories.headerFactory .createRecordRouteHeader(rraddress); clonedRequest.addFirst(recordRouteHeader); } // Add path header if(path != null && Request.REGISTER.equalsIgnoreCase(method)) { final javax.sip.address.SipURI pathURI = JainSipUtils.createRecordRouteURI(sipFactoryImpl.getSipNetworkInterfaceManager(), clonedRequest); final Iterator<String> paramNames = path.getParameterNames(); // Copy the parameters set by the user while(paramNames.hasNext()) { String paramName = paramNames.next(); pathURI.setParameter(paramName, path.getParameter(paramName)); } final Address pathAddress = SipFactories.addressFactory .createAddress(null, pathURI); // Here I need to reference the header factory impl class because can't create path header otherwise final PathHeader pathHeader = ((HeaderFactoryExt)SipFactories.headerFactory) .createPathHeader(pathAddress); clonedRequest.addFirst(pathHeader); } return clonedRequest; } catch (Exception e) { throw new RuntimeException("Problem while creating the proxied request for message " + originalRequest.getMessage(), e); } } public static SipServletResponseImpl 
createProxiedResponse(SipServletResponseImpl sipServetResponse, ProxyBranchImpl proxyBranch) { final Response response = (Response)sipServetResponse.getMessage(); final Response clonedResponse = (Response) response.clone(); ((MessageExt)clonedResponse).setApplicationData(null); final Transaction transaction = sipServetResponse.getTransaction(); final int status = response.getStatusCode(); // 1. Update timer C for provisional non retransmission responses if(transaction != null && ((SIPTransaction)transaction).getMethod().equals(Request.INVITE)) { if(Response.TRYING == status) { proxyBranch.cancel1xxTimer(); } if(Response.TRYING < status && status < Response.OK) { proxyBranch.updateTimer(true); } else if(status >= Response.OK) { //remove it if response is final proxyBranch.cancel1xxTimer(); proxyBranch.cancelTimer(); } } // 2. Remove topmost via final Iterator<ViaHeader> viaHeaderIt = clonedResponse.getHeaders(ViaHeader.NAME); viaHeaderIt.next(); viaHeaderIt.remove(); if (!viaHeaderIt.hasNext()) { return null; // response was meant for this proxy } final ProxyImpl proxy = (ProxyImpl) proxyBranch.getProxy(); final SipFactoryImpl sipFactoryImpl = proxy.getSipFactoryImpl(); SipServletRequestImpl originalRequest = (SipServletRequestImpl) proxy.getOriginalRequest(); if(Request.PRACK.equals(sipServetResponse.getMethod())) { originalRequest = (SipServletRequestImpl) proxyBranch.getPrackOriginalRequest(); } SipServletResponseImpl newServletResponseImpl = null; if(transaction != null && originalRequest != null) { // non retransmission case newServletResponseImpl = new SipServletResponseImpl(clonedResponse, sipFactoryImpl, originalRequest.getTransaction(), originalRequest.getSipSession(), sipServetResponse.getDialog(), false, sipServetResponse.isRetransmission()); } else { // retransmission case newServletResponseImpl = new SipServletResponseImpl(clonedResponse, sipFactoryImpl, null, sipServetResponse.getSipSession(), sipServetResponse.getDialog(), false, 
sipServetResponse.isRetransmission()); } newServletResponseImpl.setOriginalRequest(originalRequest); newServletResponseImpl.setProxiedResponse(true); return newServletResponseImpl; } public static String toHexString(byte[] b) { int pos = 0; char[] c = new char[b.length * 2]; for (int i = 0; i < b.length; i++) { c[pos++] = toHex[(b[i] >> 4) & 0x0F]; c[pos++] = toHex[b[i] & 0x0f]; } return new String(c); } private static final char[] toHex = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f' }; }
false
true
public static Request createProxiedRequest(SipServletRequestImpl originalRequest, ProxyBranchImpl proxyBranch, URI destination, SipURI outboundInterface, SipURI routeRecord, SipURI path) { try { final Request clonedRequest = (Request) originalRequest.getMessage().clone(); final String method = clonedRequest.getMethod(); final ProxyImpl proxy = (ProxyImpl) proxyBranch.getProxy(); final SipFactoryImpl sipFactoryImpl = proxy.getSipFactoryImpl(); ((MessageExt)clonedRequest).setApplicationData(null); String outboundTransport = JainSipUtils.findTransport(clonedRequest); if(proxy.getOutboundInterface() != null) { outboundTransport = proxy.getOutboundInterface().getTransportParam(); if(outboundTransport == null) { if(proxy.getOutboundInterface().isSecure()) { outboundTransport = ListeningPoint.TCP; } else { outboundTransport = ListeningPoint.UDP; } } } if(outboundTransport == null) outboundTransport = originalRequest.getSipSession().getTransport(); if(outboundTransport == null) outboundTransport = ListeningPoint.UDP; // The target is null when proxying subsequent requests (the Route header is already there) if(destination != null) { if(logger.isDebugEnabled()){ logger.debug("request URI on the request to proxy : " + destination); } // only set the request URI if the request has no Route headers // see RFC3261 16.12 // see http://code.google.com/p/mobicents/issues/detail?id=1847 Header route = clonedRequest.getHeader("Route"); if(route == null || // it was decided that initial requests // should have their RURI changed to pass TCK testGetAddToPath001 originalRequest.isInitial()) { if(logger.isDebugEnabled()){ logger.debug("setting request uri as cloned request has no Route headers: " + destination); } //this way everything is copied even the port but might not work for TelURI... 
clonedRequest.setRequestURI(((URIImpl)destination).getURI()); } else { if(logger.isDebugEnabled()){ logger.debug("NOT setting request uri as cloned request has at least one Route header: " + route); } } // // Add route header // javax.sip.address.SipURI routeUri = SipFactories.addressFactory.createSipURI( // params.destination.getUser(), params.destination.getHost()); // routeUri.setPort(params.destination.getPort()); // routeUri.setLrParam(); // javax.sip.address.Address address = SipFactories.addressFactory.createAddress(params.destination.getUser(), // routeUri); // RouteHeader rheader = SipFactories.headerFactory.createRouteHeader(address); // // clonedRequest.setHeader(rheader); } else { // CANCELs are hop-by-hop, so here must remove any existing Via // headers, // Record-Route headers. We insert Via header below so we will // get response. if (method.equals(Request.CANCEL)) { clonedRequest.removeHeader(ViaHeader.NAME); clonedRequest.removeHeader(RecordRouteHeader.NAME); } SipConnector sipConnector = StaticServiceHolder.sipStandardService.findSipConnector(outboundTransport); if(sipConnector != null && sipConnector.isUseStaticAddress()) { // This is needed because otherwise we have the IP LB address here. If there is no route header // this means the request will go to the IP LB. For outbound requests we must bypass the IP LB. 
// http://code.google.com/p/mobicents/issues/detail?id=2210 //clonedRequest.setRequestURI(((URIImpl)(proxyBranch).getTargetURI()).getURI()); javax.sip.address.URI uri = clonedRequest.getRequestURI(); if(uri.isSipURI()) { javax.sip.address.SipURI sipUri = (javax.sip.address.SipURI) uri; JainSipUtils.optimizeUriForInternalRoutingRequest(sipConnector, sipUri, originalRequest.getSipSession(), sipFactoryImpl, outboundTransport); } if(logger.isDebugEnabled()){ logger.debug("setting request uri as cloned request has no Route headers and is using static address: " + destination); } } } // Decrease max forwards if available MaxForwardsHeader mf = (MaxForwardsHeader) clonedRequest .getHeader(MaxForwardsHeader.NAME); if (mf == null) { mf = SipFactories.headerFactory.createMaxForwardsHeader(70); clonedRequest.addHeader(mf); } else { mf.setMaxForwards(mf.getMaxForwards() - 1); } if (method.equals(Request.CANCEL)) { // Cancel is hop by hop so remove all other via headers. clonedRequest.removeHeader(ViaHeader.NAME); } final SipApplicationSessionKey sipAppKey = originalRequest.getSipSession().getSipApplicationSession().getKey(); final String appName = sipFactoryImpl.getSipApplicationDispatcher().getHashFromApplicationName(sipAppKey.getApplicationName()); final SipServletRequestImpl proxyBranchRequest = (SipServletRequestImpl) proxyBranch.getRequest(); //Add via header ViaHeader viaHeader = null;//proxyBranch.viaHeader; if(viaHeader == null) { if(proxy.getOutboundInterface() == null) { String branchId = null; // http://code.google.com/p/mobicents/issues/detail?id=2359 // ivan dubrov : TERMINATED state checking to avoid reusing the branchid for ACK to 200 if(Request.ACK.equals(method) && proxyBranchRequest != null && proxyBranchRequest.getTransaction() != null && proxyBranchRequest.getTransaction().getState() != TransactionState.TERMINATED) { branchId = proxyBranchRequest.getTransaction().getBranchId(); logger.debug("reusing original branch id " + branchId); } // Issue viaHeader = 
JainSipUtils.createViaHeader( sipFactoryImpl.getSipNetworkInterfaceManager(), clonedRequest, branchId, null); } else { //If outbound interface is specified use it String branchId = null; // http://code.google.com/p/mobicents/issues/detail?id=2359 // ivan dubrov : TERMINATED state checking to avoid reusing the branchid for ACK to 200 if(Request.ACK.equals(method) && proxyBranchRequest != null && proxyBranchRequest.getTransaction() != null && proxyBranchRequest.getTransaction().getState() != TransactionState.TERMINATED) { branchId = proxyBranchRequest.getTransaction().getBranchId(); logger.debug("reusing original branch id " + branchId); } viaHeader = SipFactories.headerFactory.createViaHeader( proxy.getOutboundInterface().getHost(), proxy.getOutboundInterface().getPort(), outboundTransport, branchId); } proxyBranch.viaHeader = viaHeader; } // else { // String branchId = null; // viaHeader = (ViaHeader) viaHeader.clone(); // // http://code.google.com/p/mobicents/issues/detail?id=2359 // // ivan dubrov : TERMINATED state checking to avoid reusing the branchid for ACK to 200 // if(Request.ACK.equals(method) && proxyBranchRequest != null && proxyBranchRequest.getTransaction() != null // && proxyBranchRequest.getTransaction().getState() != TransactionState.TERMINATED) { // branchId = proxyBranchRequest.getTransaction().getBranchId(); // logger.debug("reusing original branch id " + branchId); // } else { // branchId = JainSipUtils.createBranch(sipAppKey.getId(), appName); // } // viaHeader.setBranch(branchId); // } clonedRequest.addHeader(viaHeader); //Add route-record header, if enabled and if needed (if not null) if(routeRecord != null && !Request.REGISTER.equalsIgnoreCase(method)) { javax.sip.address.SipURI rrURI = null; if(proxy.getOutboundInterface() == null) { rrURI = JainSipUtils.createRecordRouteURI(sipFactoryImpl.getSipNetworkInterfaceManager(), clonedRequest); } else { rrURI = ((SipURIImpl) proxy.getOutboundInterface()).getSipURI(); } 
if(originalRequest.getTransport() != null) rrURI.setTransportParam(originalRequest.getTransport()); final Iterator<String> paramNames = routeRecord.getParameterNames(); // Copy the parameters set by the user while(paramNames.hasNext()) { String paramName = paramNames.next(); rrURI.setParameter(paramName, routeRecord.getParameter(paramName)); } rrURI.setParameter(MessageDispatcher.RR_PARAM_APPLICATION_NAME, appName); rrURI.setParameter(MessageDispatcher.RR_PARAM_PROXY_APP, "true"); rrURI.setParameter(MessageDispatcher.APP_ID, sipAppKey.getId()); rrURI.setLrParam(); final Address rraddress = SipFactories.addressFactory .createAddress(null, rrURI); final RecordRouteHeader recordRouteHeader = SipFactories.headerFactory .createRecordRouteHeader(rraddress); clonedRequest.addFirst(recordRouteHeader); } // Add path header if(path != null && Request.REGISTER.equalsIgnoreCase(method)) { final javax.sip.address.SipURI pathURI = JainSipUtils.createRecordRouteURI(sipFactoryImpl.getSipNetworkInterfaceManager(), clonedRequest); final Iterator<String> paramNames = path.getParameterNames(); // Copy the parameters set by the user while(paramNames.hasNext()) { String paramName = paramNames.next(); pathURI.setParameter(paramName, path.getParameter(paramName)); } final Address pathAddress = SipFactories.addressFactory .createAddress(null, pathURI); // Here I need to reference the header factory impl class because can't create path header otherwise final PathHeader pathHeader = ((HeaderFactoryExt)SipFactories.headerFactory) .createPathHeader(pathAddress); clonedRequest.addFirst(pathHeader); } return clonedRequest; } catch (Exception e) { throw new RuntimeException("Problem while creating the proxied request for message " + originalRequest.getMessage(), e); } }
public static Request createProxiedRequest(SipServletRequestImpl originalRequest, ProxyBranchImpl proxyBranch, URI destination, SipURI outboundInterface, SipURI routeRecord, SipURI path) { try { final Request clonedRequest = (Request) originalRequest.getMessage().clone(); final String method = clonedRequest.getMethod(); final ProxyImpl proxy = (ProxyImpl) proxyBranch.getProxy(); final SipFactoryImpl sipFactoryImpl = proxy.getSipFactoryImpl(); ((MessageExt)clonedRequest).setApplicationData(null); String outboundTransport = JainSipUtils.findTransport(clonedRequest); if(proxy.getOutboundInterface() != null) { outboundTransport = proxy.getOutboundInterface().getTransportParam(); if(outboundTransport == null) { if(proxy.getOutboundInterface().isSecure()) { outboundTransport = ListeningPoint.TCP; } else { outboundTransport = ListeningPoint.UDP; } } } if(outboundTransport == null) outboundTransport = originalRequest.getSipSession().getTransport(); if(outboundTransport == null) outboundTransport = ListeningPoint.UDP; // The target is null when proxying subsequent requests (the Route header is already there) if(destination != null) { if(logger.isDebugEnabled()){ logger.debug("request URI on the request to proxy : " + destination); } // only set the request URI if the request has no Route headers // see RFC3261 16.12 // see http://code.google.com/p/mobicents/issues/detail?id=1847 Header route = clonedRequest.getHeader("Route"); if(route == null || // it was decided that initial requests // should have their RURI changed to pass TCK testGetAddToPath001 originalRequest.isInitial()) { if(logger.isDebugEnabled()){ logger.debug("setting request uri as cloned request has no Route headers: " + destination); } //this way everything is copied even the port but might not work for TelURI... 
clonedRequest.setRequestURI(((URIImpl)destination).getURI()); } else { if(logger.isDebugEnabled()){ logger.debug("NOT setting request uri as cloned request has at least one Route header: " + route); } } // // Add route header // javax.sip.address.SipURI routeUri = SipFactories.addressFactory.createSipURI( // params.destination.getUser(), params.destination.getHost()); // routeUri.setPort(params.destination.getPort()); // routeUri.setLrParam(); // javax.sip.address.Address address = SipFactories.addressFactory.createAddress(params.destination.getUser(), // routeUri); // RouteHeader rheader = SipFactories.headerFactory.createRouteHeader(address); // // clonedRequest.setHeader(rheader); } else { // CANCELs are hop-by-hop, so here must remove any existing Via // headers, // Record-Route headers. We insert Via header below so we will // get response. if (method.equals(Request.CANCEL)) { clonedRequest.removeHeader(ViaHeader.NAME); clonedRequest.removeHeader(RecordRouteHeader.NAME); } SipConnector sipConnector = StaticServiceHolder.sipStandardService.findSipConnector(outboundTransport); if(sipConnector != null && sipConnector.isUseStaticAddress()) { // This is needed because otherwise we have the IP LB address here. If there is no route header // this means the request will go to the IP LB. For outbound requests we must bypass the IP LB. 
// http://code.google.com/p/mobicents/issues/detail?id=2210 //clonedRequest.setRequestURI(((URIImpl)(proxyBranch).getTargetURI()).getURI()); javax.sip.address.URI uri = clonedRequest.getRequestURI(); if(uri.isSipURI()) { javax.sip.address.SipURI sipUri = (javax.sip.address.SipURI) uri; JainSipUtils.optimizeUriForInternalRoutingRequest(sipConnector, sipUri, originalRequest.getSipSession(), sipFactoryImpl, outboundTransport); } if(logger.isDebugEnabled()){ logger.debug("setting request uri as cloned request has no Route headers and is using static address: " + destination); } } } // Decrease max forwards if available MaxForwardsHeader mf = (MaxForwardsHeader) clonedRequest .getHeader(MaxForwardsHeader.NAME); if (mf == null) { mf = SipFactories.headerFactory.createMaxForwardsHeader(70); clonedRequest.addHeader(mf); } else { mf.setMaxForwards(mf.getMaxForwards() - 1); } if (method.equals(Request.CANCEL)) { // Cancel is hop by hop so remove all other via headers. clonedRequest.removeHeader(ViaHeader.NAME); } final SipApplicationSessionKey sipAppKey = originalRequest.getSipSession().getSipApplicationSession().getKey(); final String appName = sipFactoryImpl.getSipApplicationDispatcher().getHashFromApplicationName(sipAppKey.getApplicationName()); final SipServletRequestImpl proxyBranchRequest = (SipServletRequestImpl) proxyBranch.getRequest(); //Add via header ViaHeader viaHeader = null;//proxyBranch.viaHeader; if(viaHeader == null) { if(proxy.getOutboundInterface() == null) { String branchId = null; // http://code.google.com/p/mobicents/issues/detail?id=2359 // ivan dubrov : TERMINATED state checking to avoid reusing the branchid for ACK to 200 if(Request.ACK.equals(method) && proxyBranchRequest != null && proxyBranchRequest.getTransaction() != null && proxyBranchRequest.getTransaction().getState() != TransactionState.TERMINATED) { branchId = proxyBranchRequest.getTransaction().getBranchId(); logger.debug("reusing original branch id " + branchId); } else { branchId = 
JainSipUtils.createBranch(sipAppKey.getId(), appName); } // Issue viaHeader = JainSipUtils.createViaHeader( sipFactoryImpl.getSipNetworkInterfaceManager(), clonedRequest, branchId, null); } else { //If outbound interface is specified use it String branchId = null; // http://code.google.com/p/mobicents/issues/detail?id=2359 // ivan dubrov : TERMINATED state checking to avoid reusing the branchid for ACK to 200 if(Request.ACK.equals(method) && proxyBranchRequest != null && proxyBranchRequest.getTransaction() != null && proxyBranchRequest.getTransaction().getState() != TransactionState.TERMINATED) { branchId = proxyBranchRequest.getTransaction().getBranchId(); logger.debug("reusing original branch id " + branchId); } else { branchId = JainSipUtils.createBranch(sipAppKey.getId(), appName); } viaHeader = SipFactories.headerFactory.createViaHeader( proxy.getOutboundInterface().getHost(), proxy.getOutboundInterface().getPort(), outboundTransport, branchId); } proxyBranch.viaHeader = viaHeader; } // else { // String branchId = null; // viaHeader = (ViaHeader) viaHeader.clone(); // // http://code.google.com/p/mobicents/issues/detail?id=2359 // // ivan dubrov : TERMINATED state checking to avoid reusing the branchid for ACK to 200 // if(Request.ACK.equals(method) && proxyBranchRequest != null && proxyBranchRequest.getTransaction() != null // && proxyBranchRequest.getTransaction().getState() != TransactionState.TERMINATED) { // branchId = proxyBranchRequest.getTransaction().getBranchId(); // logger.debug("reusing original branch id " + branchId); // } else { // branchId = JainSipUtils.createBranch(sipAppKey.getId(), appName); // } // viaHeader.setBranch(branchId); // } clonedRequest.addHeader(viaHeader); //Add route-record header, if enabled and if needed (if not null) if(routeRecord != null && !Request.REGISTER.equalsIgnoreCase(method)) { javax.sip.address.SipURI rrURI = null; if(proxy.getOutboundInterface() == null) { rrURI = 
JainSipUtils.createRecordRouteURI(sipFactoryImpl.getSipNetworkInterfaceManager(), clonedRequest); } else { rrURI = ((SipURIImpl) proxy.getOutboundInterface()).getSipURI(); } if(originalRequest.getTransport() != null) rrURI.setTransportParam(originalRequest.getTransport()); final Iterator<String> paramNames = routeRecord.getParameterNames(); // Copy the parameters set by the user while(paramNames.hasNext()) { String paramName = paramNames.next(); rrURI.setParameter(paramName, routeRecord.getParameter(paramName)); } rrURI.setParameter(MessageDispatcher.RR_PARAM_APPLICATION_NAME, appName); rrURI.setParameter(MessageDispatcher.RR_PARAM_PROXY_APP, "true"); rrURI.setParameter(MessageDispatcher.APP_ID, sipAppKey.getId()); rrURI.setLrParam(); final Address rraddress = SipFactories.addressFactory .createAddress(null, rrURI); final RecordRouteHeader recordRouteHeader = SipFactories.headerFactory .createRecordRouteHeader(rraddress); clonedRequest.addFirst(recordRouteHeader); } // Add path header if(path != null && Request.REGISTER.equalsIgnoreCase(method)) { final javax.sip.address.SipURI pathURI = JainSipUtils.createRecordRouteURI(sipFactoryImpl.getSipNetworkInterfaceManager(), clonedRequest); final Iterator<String> paramNames = path.getParameterNames(); // Copy the parameters set by the user while(paramNames.hasNext()) { String paramName = paramNames.next(); pathURI.setParameter(paramName, path.getParameter(paramName)); } final Address pathAddress = SipFactories.addressFactory .createAddress(null, pathURI); // Here I need to reference the header factory impl class because can't create path header otherwise final PathHeader pathHeader = ((HeaderFactoryExt)SipFactories.headerFactory) .createPathHeader(pathAddress); clonedRequest.addFirst(pathHeader); } return clonedRequest; } catch (Exception e) { throw new RuntimeException("Problem while creating the proxied request for message " + originalRequest.getMessage(), e); } }
diff --git a/torrentfreak-free/src/com/torrentfreak/reader/free/articles/providers/CategoryListProvider.java b/torrentfreak-free/src/com/torrentfreak/reader/free/articles/providers/CategoryListProvider.java index 80cc9a6..331ab95 100644 --- a/torrentfreak-free/src/com/torrentfreak/reader/free/articles/providers/CategoryListProvider.java +++ b/torrentfreak-free/src/com/torrentfreak/reader/free/articles/providers/CategoryListProvider.java @@ -1,143 +1,143 @@ /* * Copyright (C) 2013 Jack Wakefield * * This file is part of TorrentFreak Reader. * * TorrentFreak Reader is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * TorrentFreak Reader is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with TorrentFreak Reader. If not, see <http://www.gnu.org/licenses/>. */ package com.torrentfreak.reader.free.articles.providers; import java.lang.Exception; import java.lang.Integer; import java.util.ArrayList; import java.util.GregorianCalendar; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import android.util.Log; import com.torrentfreak.reader.free.articles.ArticleItem; import com.torrentfreak.reader.free.articles.providers.ArticleListProvider; import com.torrentfreak.reader.free.articles.providers.exceptions.ArticleScrapeException; import com.torrentfreak.reader.free.categories.CategoryItem; import org.jsoup.nodes.Document; import org.jsoup.nodes.Element; import org.jsoup.select.Elements; public class CategoryListProvider extends ArticleListProvider { /** * The date scraper. 
*/ private static final Pattern dateScraper = Pattern.compile("([0-9]{1,2})/([0-9]{1,2})/([0-9]{4})"); public CategoryListProvider(final CategoryItem category) { super(category); } public List<ArticleItem> scrape(final Document document) throws Exception { // ensure the document exists if (document == null) { throw new ArticleScrapeException("unable to parse document"); } // retrieve the article elements final Elements articleElements = document.getElementsByTag("article"); final List<ArticleItem> articles = new ArrayList<ArticleItem>(); // loop through each article element for (final Element articleElement : articleElements) { ArticleItem article = null; try { // attempt to scrape the article element article = scrapeArticleItem(articleElement); } catch (final ArticleScrapeException e) { throw e; } // ensure the article exists if (article != null) { // add the article to the article list articles.add(article); } } return articles; } private ArticleItem scrapeArticleItem(final Element articleElement) throws ArticleScrapeException { // retrieve the title element final Element titleElement = articleElement.select("header h4 a").first(); // ensure the title element exists if (titleElement == null) { throw new ArticleScrapeException("title not found"); } // retrieve the comments element final Element commentsElement = articleElement.select("footer ul li:nth-child(1) a").first(); // ensure the comments element exists if (commentsElement == null) { throw new ArticleScrapeException("comment count not found"); } // retrieve the date element - final Element dateElement = articleElement.select("footer ul li:nth-child(2)").first(); + final Element dateElement = articleElement.select("footer ul li:nth-child(3)").first(); // ensure the date element exists if (dateElement == null) { throw new ArticleScrapeException("date not found"); } // retrieve the title and URL text final String title = titleElement.text(); final String url = titleElement.attr("href"); // retrieve the comment 
count text and strip out any character which is not numerical String commentCountText = commentsElement.text(); commentCountText = commentCountText.replaceAll("[^0-9]", ""); int commentCount = 0; if (commentCountText.length() > 0) { commentCount = Integer.parseInt(commentCountText); } final Matcher dateMatcher = dateScraper.matcher(dateElement.text()); final GregorianCalendar date = new GregorianCalendar(); // attempt to find a match for the date from the date element text if (dateMatcher.find()) { final int year = Integer.parseInt(dateMatcher.group(3)); final int month = Integer.parseInt(dateMatcher.group(2)) - 1; final int day = Integer.parseInt(dateMatcher.group(1)); date.set(year, month, day); } // create the article setting the details to those retrieved final ArticleItem article = new ArticleItem(); article.setCategoryId(category.getId()); article.setTitle(title); article.setUrl(url); article.setDate(date); article.setCommentCount(commentCount); return article; } }
true
true
private ArticleItem scrapeArticleItem(final Element articleElement) throws ArticleScrapeException { // retrieve the title element final Element titleElement = articleElement.select("header h4 a").first(); // ensure the title element exists if (titleElement == null) { throw new ArticleScrapeException("title not found"); } // retrieve the comments element final Element commentsElement = articleElement.select("footer ul li:nth-child(1) a").first(); // ensure the comments element exists if (commentsElement == null) { throw new ArticleScrapeException("comment count not found"); } // retrieve the date element final Element dateElement = articleElement.select("footer ul li:nth-child(2)").first(); // ensure the date element exists if (dateElement == null) { throw new ArticleScrapeException("date not found"); } // retrieve the title and URL text final String title = titleElement.text(); final String url = titleElement.attr("href"); // retrieve the comment count text and strip out any character which is not numerical String commentCountText = commentsElement.text(); commentCountText = commentCountText.replaceAll("[^0-9]", ""); int commentCount = 0; if (commentCountText.length() > 0) { commentCount = Integer.parseInt(commentCountText); } final Matcher dateMatcher = dateScraper.matcher(dateElement.text()); final GregorianCalendar date = new GregorianCalendar(); // attempt to find a match for the date from the date element text if (dateMatcher.find()) { final int year = Integer.parseInt(dateMatcher.group(3)); final int month = Integer.parseInt(dateMatcher.group(2)) - 1; final int day = Integer.parseInt(dateMatcher.group(1)); date.set(year, month, day); } // create the article setting the details to those retrieved final ArticleItem article = new ArticleItem(); article.setCategoryId(category.getId()); article.setTitle(title); article.setUrl(url); article.setDate(date); article.setCommentCount(commentCount); return article; }
private ArticleItem scrapeArticleItem(final Element articleElement) throws ArticleScrapeException { // retrieve the title element final Element titleElement = articleElement.select("header h4 a").first(); // ensure the title element exists if (titleElement == null) { throw new ArticleScrapeException("title not found"); } // retrieve the comments element final Element commentsElement = articleElement.select("footer ul li:nth-child(1) a").first(); // ensure the comments element exists if (commentsElement == null) { throw new ArticleScrapeException("comment count not found"); } // retrieve the date element final Element dateElement = articleElement.select("footer ul li:nth-child(3)").first(); // ensure the date element exists if (dateElement == null) { throw new ArticleScrapeException("date not found"); } // retrieve the title and URL text final String title = titleElement.text(); final String url = titleElement.attr("href"); // retrieve the comment count text and strip out any character which is not numerical String commentCountText = commentsElement.text(); commentCountText = commentCountText.replaceAll("[^0-9]", ""); int commentCount = 0; if (commentCountText.length() > 0) { commentCount = Integer.parseInt(commentCountText); } final Matcher dateMatcher = dateScraper.matcher(dateElement.text()); final GregorianCalendar date = new GregorianCalendar(); // attempt to find a match for the date from the date element text if (dateMatcher.find()) { final int year = Integer.parseInt(dateMatcher.group(3)); final int month = Integer.parseInt(dateMatcher.group(2)) - 1; final int day = Integer.parseInt(dateMatcher.group(1)); date.set(year, month, day); } // create the article setting the details to those retrieved final ArticleItem article = new ArticleItem(); article.setCategoryId(category.getId()); article.setTitle(title); article.setUrl(url); article.setDate(date); article.setCommentCount(commentCount); return article; }
diff --git a/java/client/test/org/openqa/selenium/SingleTestSuite.java b/java/client/test/org/openqa/selenium/SingleTestSuite.java index 4b2cdb26b..3082c4381 100644 --- a/java/client/test/org/openqa/selenium/SingleTestSuite.java +++ b/java/client/test/org/openqa/selenium/SingleTestSuite.java @@ -1,95 +1,95 @@ /* Copyright 2007-2009 WebDriver committers Copyright 2007-2009 Google Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.openqa.selenium; import static org.openqa.selenium.Ignore.Driver.ALL; import static org.openqa.selenium.net.PortProber.findFreePort; import junit.framework.Test; import junit.framework.TestCase; import java.util.HashMap; import java.util.Map; @SuppressWarnings("unused") public class SingleTestSuite extends TestCase { private static final String CHROME = "org.openqa.selenium.chrome.ChromeDriver"; private static final String FIREFOX = "org.openqa.selenium.firefox.FirefoxDriver"; private static final String FIREFOX_TEST = "org.openqa.selenium.firefox.FirefoxDriverTestSuite$TestFirefoxDriver"; private static final String HTML_UNIT = "org.openqa.selenium.htmlunit.HtmlUnitDriver"; private static final String HTML_UNIT_JS = "org.openqa.selenium.htmlunit.JavascriptEnabledHtmlUnitDriverTestSuite$HtmlUnitDriverForTest"; private static final String IE = "org.openqa.selenium.ie.InternetExplorerDriver"; private static final String IPHONE = "org.openqa.selenium.iphone.IPhoneDriver"; private static final String OPERA = "com.opera.core.systems.OperaDriver"; private static final 
String REMOTE = "org.openqa.selenium.remote.server.RemoteWebDriverTestSuite$RemoteWebDriverForTest"; private static final String REMOTE_IE = "org.openqa.selenium.remote.server.RemoteWebDriverIeTestSuite$RemoteIeWebDriverForTest"; private static final String SELENIUM = "org.openqa.selenium.v1.SeleneseBackedWebDriver"; private static final Map<String, Ignore.Driver> EXCLUSIONS_BY_DRIVER = new HashMap<String, Ignore.Driver>() {{ put(CHROME, Ignore.Driver.CHROME); put(FIREFOX, Ignore.Driver.FIREFOX); put(FIREFOX_TEST, Ignore.Driver.FIREFOX); put(HTML_UNIT, Ignore.Driver.HTMLUNIT); put(HTML_UNIT_JS, Ignore.Driver.HTMLUNIT); put(IE, Ignore.Driver.IE); put(IPHONE, Ignore.Driver.IPHONE); put(REMOTE, Ignore.Driver.REMOTE); put(REMOTE_IE, Ignore.Driver.IE); put(SELENIUM, Ignore.Driver.SELENESE); }}; public static Test suite() throws Exception { - String driver = FIREFOX_TEST; + String driver = IE; System.setProperty("jna.library.path", "..\\build;build"); System.setProperty("webdriver.selenium.server.port", String.valueOf(findFreePort())); System.setProperty("webdriver.development", "true"); // System.setProperty("webdriver.debug", "true"); // System.setProperty("webdriver.firefox.reap_profile", "false"); TestSuiteBuilder builder = new TestSuiteBuilder() .addSourceDir("java/client/test") .usingDriver(driver) .keepDriverInstance() .includeJavascriptTests() .onlyRun("AlertsTest") // .method("testShouldBeAbleToFindAnElementByCssSelector") .exclude(ALL) .exclude(EXCLUSIONS_BY_DRIVER.get(driver)) .outputTestNames() .leaveRunning() ; // Yeah, this look strange :) if (REMOTE.equals(driver) || REMOTE_IE.equals(driver)) { builder.addSuiteDecorator( "org.openqa.selenium.remote.server.RemoteWebDriverTestSuite$RemoteDriverServerStarter"); } else if (SELENIUM.equals(driver)) { builder.addSuiteDecorator( "org.openqa.selenium.v1.SeleniumServerStarter"); } builder.addSuiteDecorator("org.openqa.selenium.TestNameDecorator"); return builder.create(); } }
true
true
public static Test suite() throws Exception { String driver = FIREFOX_TEST; System.setProperty("jna.library.path", "..\\build;build"); System.setProperty("webdriver.selenium.server.port", String.valueOf(findFreePort())); System.setProperty("webdriver.development", "true"); // System.setProperty("webdriver.debug", "true"); // System.setProperty("webdriver.firefox.reap_profile", "false"); TestSuiteBuilder builder = new TestSuiteBuilder() .addSourceDir("java/client/test") .usingDriver(driver) .keepDriverInstance() .includeJavascriptTests() .onlyRun("AlertsTest") // .method("testShouldBeAbleToFindAnElementByCssSelector") .exclude(ALL) .exclude(EXCLUSIONS_BY_DRIVER.get(driver)) .outputTestNames() .leaveRunning() ; // Yeah, this look strange :) if (REMOTE.equals(driver) || REMOTE_IE.equals(driver)) { builder.addSuiteDecorator( "org.openqa.selenium.remote.server.RemoteWebDriverTestSuite$RemoteDriverServerStarter"); } else if (SELENIUM.equals(driver)) { builder.addSuiteDecorator( "org.openqa.selenium.v1.SeleniumServerStarter"); } builder.addSuiteDecorator("org.openqa.selenium.TestNameDecorator"); return builder.create(); }
public static Test suite() throws Exception { String driver = IE; System.setProperty("jna.library.path", "..\\build;build"); System.setProperty("webdriver.selenium.server.port", String.valueOf(findFreePort())); System.setProperty("webdriver.development", "true"); // System.setProperty("webdriver.debug", "true"); // System.setProperty("webdriver.firefox.reap_profile", "false"); TestSuiteBuilder builder = new TestSuiteBuilder() .addSourceDir("java/client/test") .usingDriver(driver) .keepDriverInstance() .includeJavascriptTests() .onlyRun("AlertsTest") // .method("testShouldBeAbleToFindAnElementByCssSelector") .exclude(ALL) .exclude(EXCLUSIONS_BY_DRIVER.get(driver)) .outputTestNames() .leaveRunning() ; // Yeah, this look strange :) if (REMOTE.equals(driver) || REMOTE_IE.equals(driver)) { builder.addSuiteDecorator( "org.openqa.selenium.remote.server.RemoteWebDriverTestSuite$RemoteDriverServerStarter"); } else if (SELENIUM.equals(driver)) { builder.addSuiteDecorator( "org.openqa.selenium.v1.SeleniumServerStarter"); } builder.addSuiteDecorator("org.openqa.selenium.TestNameDecorator"); return builder.create(); }
diff --git a/src/com/artemis/EntitySystem.java b/src/com/artemis/EntitySystem.java index 832ac33f..406e1305 100644 --- a/src/com/artemis/EntitySystem.java +++ b/src/com/artemis/EntitySystem.java @@ -1,238 +1,238 @@ package com.artemis; import java.util.BitSet; import java.util.HashMap; import com.artemis.utils.Bag; import com.artemis.utils.ImmutableBag; /** * The most raw entity system. It should not typically be used, but you can create your own * entity system handling by extending this. It is recommended that you use the other provided * entity system implementations. * * @author Arni Arent * */ public abstract class EntitySystem implements EntityObserver { private final int systemIndex; protected World world; private Bag<Entity> actives; private BitSet allSet; private BitSet exclusionSet; private BitSet oneSet; private boolean passive; private boolean enabled; private boolean dummy; /** * Creates an entity system that uses the specified aspect as a matcher against entities. * @param aspect to match against entities */ public EntitySystem(Aspect aspect) { actives = new Bag<Entity>(); allSet = aspect.getAllSet(); exclusionSet = aspect.getExclusionSet(); oneSet = aspect.getOneSet(); systemIndex = SystemIndexManager.getIndexFor(this.getClass()); dummy = allSet.isEmpty() && oneSet.isEmpty(); // This system can't possibly be interested in any entity, so it must be "dummy" enabled = true; } /** * Called before processing of entities begins. */ protected void begin() { } public final void process() { if(enabled && checkProcessing()) { begin(); processEntities(actives); end(); } } /** * Called after the processing of entities ends. */ protected void end() { } /** * Any implementing entity system must implement this method and the logic * to process the given entities of the system. * * @param entities the entities this system contains. 
*/ protected abstract void processEntities(ImmutableBag<Entity> entities); /** * * @return true if the system should be processed, false if not. */ protected abstract boolean checkProcessing(); /** * Override to implement code that gets executed when systems are initialized. */ protected void initialize() {}; /** * Called if the system has received a entity it is interested in, e.g. created or a component was added to it. * @param e the entity that was added to this system. */ protected void inserted(Entity e) {}; /** * Called if a entity was removed from this system, e.g. deleted or had one of it's components removed. * @param e the entity that was removed from this system. */ protected void removed(Entity e) {}; /** * Returns true if the system is enabled. * * @return True if enabled, otherwise false. */ public boolean isEnabled() { return enabled; } /** * Enabled systems are run during {@link #process()}. Systems are enabled by defautl. * * @param enabled System will not run when set to false. */ public void setEnabled(boolean enabled) { this.enabled = enabled; } /** * Will check if the entity is of interest to this system. * @param e entity to check */ protected final void check(Entity e) { if(dummy) { return; } boolean contains = e.getSystemBits().get(systemIndex); boolean interested = true; // possibly interested, let's try to prove it wrong. BitSet componentBits = e.getComponentBits(); // Check if the entity possesses ALL of the components defined in the aspect. if(!allSet.isEmpty()) { for (int i = allSet.nextSetBit(0); i >= 0; i = allSet.nextSetBit(i+1)) { if(!componentBits.get(i)) { interested = false; break; } } } // Check if the entity possesses ANY of the exclusion components, if it does then the system is not interested. if(!exclusionSet.isEmpty() && interested) { interested = !exclusionSet.intersects(componentBits); } // Check if the entity possesses ANY of the components in the oneSet. If so, the system is interested. 
- if(!oneSet.isEmpty()) { + if(!oneSet.isEmpty() && interested) { interested = oneSet.intersects(componentBits); } if (interested && !contains) { insertToSystem(e); } else if (!interested && contains) { removeFromSystem(e); } } private void removeFromSystem(Entity e) { actives.remove(e); e.getSystemBits().clear(systemIndex); removed(e); } private void insertToSystem(Entity e) { actives.add(e); e.getSystemBits().set(systemIndex); inserted(e); } @Override public final void added(Entity e) { check(e); } @Override public final void changed(Entity e) { check(e); } @Override public final void deleted(Entity e) { if(e.getSystemBits().get(systemIndex)) { removeFromSystem(e); } } @Override public final void disabled(Entity e) { if(e.getSystemBits().get(systemIndex)) { removeFromSystem(e); } } @Override public final void enabled(Entity e) { check(e); } protected final void setWorld(World world) { this.world = world; } public boolean isPassive() { return passive; } protected void setPassive(boolean passive) { this.passive = passive; } public ImmutableBag<Entity> getActives() { return actives; } /** * Used to generate a unique bit for each system. * Only used internally in EntitySystem. */ private static class SystemIndexManager { private static int INDEX = 0; private static HashMap<Class<? extends EntitySystem>, Integer> indices = new HashMap<Class<? extends EntitySystem>, Integer>(); private static int getIndexFor(Class<? extends EntitySystem> es){ Integer index = indices.get(es); if(index == null) { index = INDEX++; indices.put(es, index); } return index; } } }
true
true
protected final void check(Entity e) { if(dummy) { return; } boolean contains = e.getSystemBits().get(systemIndex); boolean interested = true; // possibly interested, let's try to prove it wrong. BitSet componentBits = e.getComponentBits(); // Check if the entity possesses ALL of the components defined in the aspect. if(!allSet.isEmpty()) { for (int i = allSet.nextSetBit(0); i >= 0; i = allSet.nextSetBit(i+1)) { if(!componentBits.get(i)) { interested = false; break; } } } // Check if the entity possesses ANY of the exclusion components, if it does then the system is not interested. if(!exclusionSet.isEmpty() && interested) { interested = !exclusionSet.intersects(componentBits); } // Check if the entity possesses ANY of the components in the oneSet. If so, the system is interested. if(!oneSet.isEmpty()) { interested = oneSet.intersects(componentBits); } if (interested && !contains) { insertToSystem(e); } else if (!interested && contains) { removeFromSystem(e); } }
protected final void check(Entity e) { if(dummy) { return; } boolean contains = e.getSystemBits().get(systemIndex); boolean interested = true; // possibly interested, let's try to prove it wrong. BitSet componentBits = e.getComponentBits(); // Check if the entity possesses ALL of the components defined in the aspect. if(!allSet.isEmpty()) { for (int i = allSet.nextSetBit(0); i >= 0; i = allSet.nextSetBit(i+1)) { if(!componentBits.get(i)) { interested = false; break; } } } // Check if the entity possesses ANY of the exclusion components, if it does then the system is not interested. if(!exclusionSet.isEmpty() && interested) { interested = !exclusionSet.intersects(componentBits); } // Check if the entity possesses ANY of the components in the oneSet. If so, the system is interested. if(!oneSet.isEmpty() && interested) { interested = oneSet.intersects(componentBits); } if (interested && !contains) { insertToSystem(e); } else if (!interested && contains) { removeFromSystem(e); } }
diff --git a/src/com/iCo6/util/Common.java b/src/com/iCo6/util/Common.java index 36a0d05..6e7f65d 100644 --- a/src/com/iCo6/util/Common.java +++ b/src/com/iCo6/util/Common.java @@ -1,218 +1,218 @@ package com.iCo6.util; import com.iCo6.Constants; import com.iCo6.iConomy; import java.io.BufferedReader; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.io.StringWriter; import java.io.Writer; import java.util.List; import java.util.concurrent.TimeUnit; public class Common { /** * Checks text against two variables, if it equals at least one returns true. * * @param text The text that we were provided with. * @param against The first variable that needs to be checked against * @param or The second variable that it could possibly be. * * @return <code>Boolean</code> - True or false based on text. */ public static boolean matches(String text, String... is) { for (String s : is) if (text.equalsIgnoreCase(s)) return true; return false; } public static int plural(Double amount) { if(amount > 1 || amount < -1) return 1; return 0; } public static int plural(Integer amount) { if(amount != 1 || amount != -1) return 1; return 0; } public static int plural(Long amount) { if(amount != 1 || amount != -1) return 1; return 0; } public static String formatted(String amount, List<String> maj, List<String> min) { if(Constants.Nodes.isSingle.getBoolean()) if(amount.contains(".")) amount = amount.split("\\.")[0]; String formatted = ""; String famount = amount.replace(",", ""); - if(true) { + if(Constants.Nodes.AllowMinor.getBoolean()) { String[] pieces = null; String[] fpieces = null; if(amount.contains(".")) { pieces = amount.split("\\."); fpieces = new String[] { pieces[0].replace(",", ""), pieces[1] }; } else { pieces = new String[] { amount, "0" }; fpieces = new String[] { amount.replace(",", ""), "0" }; } - if(true) { + 
if(Constants.Nodes.isSplit.getBoolean()) { String major = "", minor = ""; try { major = maj.get(plural(Integer.valueOf(fpieces[0]))); minor = min.get(plural(Integer.valueOf(fpieces[1]))); } catch (NumberFormatException E) { major = maj.get(plural(Long.valueOf(fpieces[0]))); minor = min.get(plural(Long.valueOf(fpieces[1]))); } if(pieces[1].startsWith("0") && !pieces[1].equals("0")) pieces[1] = pieces[1].substring(1, pieces[1].length()); if(pieces[0].startsWith("0") && !pieces[0].equals("0")) pieces[0] = pieces[0].substring(1, pieces[0].length()); try { if(Integer.valueOf(fpieces[1]) != 0 && Integer.valueOf(fpieces[0]) != 0) formatted = pieces[0] + " " + major + ", " + pieces[1] + " " + minor; else if(Integer.valueOf(fpieces[0]) != 0) formatted = pieces[0] + " " + major; else formatted = pieces[1] + " " + minor; } catch(NumberFormatException e) { if(Long.valueOf(fpieces[1]) != 0 && Long.valueOf(fpieces[0]) != 0) formatted = pieces[0] + " " + major + ", " + pieces[1] + " " + minor; else if(Long.valueOf(fpieces[0]) != 0) formatted = pieces[0] + " " + major; else formatted = pieces[1] + " " + minor; } } else { String currency = ""; if(Double.valueOf(famount) < 1 || Double.valueOf(famount) > -1) try { currency = min.get(plural(Integer.valueOf(fpieces[1]))); } catch (NumberFormatException e) { currency = min.get(plural(Long.valueOf(fpieces[1]))); } else currency = maj.get(1); formatted = amount + " " + currency; } } else { int plural = plural(Double.valueOf(famount)); String currency = maj.get(plural); formatted = amount + " " + currency; } return formatted; } public static String readableSize(long size) { String[] units = new String[] { "B", "KB", "MB", "GB", "TB", "PB" }; int mod = 1024, i; for (i = 0; size > mod; i++) size /= mod; return Math.round(size) + " " + units[i]; } public static String readableProfile(long time) { int i = 0; String[] units = new String[] { "ms", "s", "m", "hr", "day", "week", "mnth", "yr" }; int[] metric = new int[] { 1000, 60, 60, 24, 7, 30, 
12 }; long current = TimeUnit.MILLISECONDS.convert(time, TimeUnit.NANOSECONDS); for(i = 0; current > metric[i]; i++) current /= metric[i]; return current + " " + units[i] + ((current > 1 && i > 1) ? "s" : ""); } public static void extract(String... names) { for(String name: names) { File actual = new File(iConomy.directory, name); if(actual.exists()) continue; InputStream input = iConomy.class.getResourceAsStream("/resources/" + name); if(input == null) continue; FileOutputStream output = null; try { output = new FileOutputStream(actual); byte[] buf = new byte[8192]; int length = 0; while ((length = input.read(buf)) > 0) output.write(buf, 0, length); System.out.println("[iConomy] Default setup file written: " + name); } catch (Exception e) { } finally { try { if (input != null) input.close(); } catch (Exception e) { } try { if (output != null) output.close(); } catch (Exception e) { } } } } public static String resourceToString(String name) { InputStream input = iConomy.class.getResourceAsStream("/resources/" + name); Writer writer = new StringWriter(); char[] buffer = new char[1024]; if(input != null) { try { int n; Reader reader = new BufferedReader(new InputStreamReader(input)); while ((n = reader.read(buffer)) != -1) writer.write(buffer, 0, n); } catch (IOException e) { try { input.close(); } catch (IOException ex) { } return null; } finally { try { input.close(); } catch (IOException e) { } } } else { return null; } String text = writer.toString().trim(); text = text.replace("\r\n", " ").replace("\n", " "); return text.trim(); } }
false
true
public static String formatted(String amount, List<String> maj, List<String> min) { if(Constants.Nodes.isSingle.getBoolean()) if(amount.contains(".")) amount = amount.split("\\.")[0]; String formatted = ""; String famount = amount.replace(",", ""); if(true) { String[] pieces = null; String[] fpieces = null; if(amount.contains(".")) { pieces = amount.split("\\."); fpieces = new String[] { pieces[0].replace(",", ""), pieces[1] }; } else { pieces = new String[] { amount, "0" }; fpieces = new String[] { amount.replace(",", ""), "0" }; } if(true) { String major = "", minor = ""; try { major = maj.get(plural(Integer.valueOf(fpieces[0]))); minor = min.get(plural(Integer.valueOf(fpieces[1]))); } catch (NumberFormatException E) { major = maj.get(plural(Long.valueOf(fpieces[0]))); minor = min.get(plural(Long.valueOf(fpieces[1]))); } if(pieces[1].startsWith("0") && !pieces[1].equals("0")) pieces[1] = pieces[1].substring(1, pieces[1].length()); if(pieces[0].startsWith("0") && !pieces[0].equals("0")) pieces[0] = pieces[0].substring(1, pieces[0].length()); try { if(Integer.valueOf(fpieces[1]) != 0 && Integer.valueOf(fpieces[0]) != 0) formatted = pieces[0] + " " + major + ", " + pieces[1] + " " + minor; else if(Integer.valueOf(fpieces[0]) != 0) formatted = pieces[0] + " " + major; else formatted = pieces[1] + " " + minor; } catch(NumberFormatException e) { if(Long.valueOf(fpieces[1]) != 0 && Long.valueOf(fpieces[0]) != 0) formatted = pieces[0] + " " + major + ", " + pieces[1] + " " + minor; else if(Long.valueOf(fpieces[0]) != 0) formatted = pieces[0] + " " + major; else formatted = pieces[1] + " " + minor; } } else { String currency = ""; if(Double.valueOf(famount) < 1 || Double.valueOf(famount) > -1) try { currency = min.get(plural(Integer.valueOf(fpieces[1]))); } catch (NumberFormatException e) { currency = min.get(plural(Long.valueOf(fpieces[1]))); } else currency = maj.get(1); formatted = amount + " " + currency; } } else { int plural = plural(Double.valueOf(famount)); String 
currency = maj.get(plural); formatted = amount + " " + currency; } return formatted; }
public static String formatted(String amount, List<String> maj, List<String> min) { if(Constants.Nodes.isSingle.getBoolean()) if(amount.contains(".")) amount = amount.split("\\.")[0]; String formatted = ""; String famount = amount.replace(",", ""); if(Constants.Nodes.AllowMinor.getBoolean()) { String[] pieces = null; String[] fpieces = null; if(amount.contains(".")) { pieces = amount.split("\\."); fpieces = new String[] { pieces[0].replace(",", ""), pieces[1] }; } else { pieces = new String[] { amount, "0" }; fpieces = new String[] { amount.replace(",", ""), "0" }; } if(Constants.Nodes.isSplit.getBoolean()) { String major = "", minor = ""; try { major = maj.get(plural(Integer.valueOf(fpieces[0]))); minor = min.get(plural(Integer.valueOf(fpieces[1]))); } catch (NumberFormatException E) { major = maj.get(plural(Long.valueOf(fpieces[0]))); minor = min.get(plural(Long.valueOf(fpieces[1]))); } if(pieces[1].startsWith("0") && !pieces[1].equals("0")) pieces[1] = pieces[1].substring(1, pieces[1].length()); if(pieces[0].startsWith("0") && !pieces[0].equals("0")) pieces[0] = pieces[0].substring(1, pieces[0].length()); try { if(Integer.valueOf(fpieces[1]) != 0 && Integer.valueOf(fpieces[0]) != 0) formatted = pieces[0] + " " + major + ", " + pieces[1] + " " + minor; else if(Integer.valueOf(fpieces[0]) != 0) formatted = pieces[0] + " " + major; else formatted = pieces[1] + " " + minor; } catch(NumberFormatException e) { if(Long.valueOf(fpieces[1]) != 0 && Long.valueOf(fpieces[0]) != 0) formatted = pieces[0] + " " + major + ", " + pieces[1] + " " + minor; else if(Long.valueOf(fpieces[0]) != 0) formatted = pieces[0] + " " + major; else formatted = pieces[1] + " " + minor; } } else { String currency = ""; if(Double.valueOf(famount) < 1 || Double.valueOf(famount) > -1) try { currency = min.get(plural(Integer.valueOf(fpieces[1]))); } catch (NumberFormatException e) { currency = min.get(plural(Long.valueOf(fpieces[1]))); } else currency = maj.get(1); formatted = amount + " " + 
currency; } } else { int plural = plural(Double.valueOf(famount)); String currency = maj.get(plural); formatted = amount + " " + currency; } return formatted; }
diff --git a/bundles/org.eclipse.equinox.p2.repository.tools/src/org/eclipse/equinox/p2/internal/repository/tools/Repo2Runnable.java b/bundles/org.eclipse.equinox.p2.repository.tools/src/org/eclipse/equinox/p2/internal/repository/tools/Repo2Runnable.java index ef2a4bb4a..801639cb0 100644 --- a/bundles/org.eclipse.equinox.p2.repository.tools/src/org/eclipse/equinox/p2/internal/repository/tools/Repo2Runnable.java +++ b/bundles/org.eclipse.equinox.p2.repository.tools/src/org/eclipse/equinox/p2/internal/repository/tools/Repo2Runnable.java @@ -1,277 +1,284 @@ /******************************************************************************* * Copyright (c) 2009 IBM Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * IBM Corporation - initial API and implementation *******************************************************************************/ package org.eclipse.equinox.p2.internal.repository.tools; import java.io.File; import java.net.URI; import java.util.*; import org.eclipse.core.runtime.*; import org.eclipse.equinox.app.IApplication; import org.eclipse.equinox.app.IApplicationContext; import org.eclipse.equinox.internal.p2.core.helpers.ServiceHelper; import org.eclipse.equinox.internal.provisional.p2.artifact.repository.IArtifactRepositoryManager; import org.eclipse.equinox.internal.provisional.p2.core.ProvisionException; import org.eclipse.equinox.internal.provisional.p2.engine.*; import org.eclipse.equinox.internal.provisional.p2.engine.phases.Collect; import org.eclipse.equinox.internal.provisional.p2.metadata.IInstallableUnit; import org.eclipse.equinox.internal.provisional.p2.metadata.query.InstallableUnitQuery; import org.eclipse.equinox.internal.provisional.p2.metadata.repository.IMetadataRepository; import 
org.eclipse.equinox.internal.provisional.p2.metadata.repository.IMetadataRepositoryManager; import org.eclipse.equinox.internal.provisional.p2.query.Collector; /** * The transformer takes an existing p2 repository (local or remote), iterates over * its list of IUs, and fetches all of the corresponding artifacts to a user-specified location. * Once fetched, the artifacts will be in "runnable" form... that is directory-based bundles will be * extracted into folders and packed JAR files will be un-packed. * * @since 1.0 */ public class Repo2Runnable implements IApplication { private String destinationArtifactRepository; // where to publish the files private String destinationMetadataRepository; // where to copy the metadata to private List sourceArtifactRepositories = new ArrayList(); // where are the artifacts? // only one of these needs to be set. if it is the repo then we will process // the whole repo. otherwise we will process just the list of IUS. private List sourceMetadataRepositories = new ArrayList(); // where is the metadata? private List sourceIUs = new ArrayList(); // list of IUs to process // the list of IUs that we actually transformed... could have come from the repo // or have been user-specified. private Collection processedIUs = new ArrayList(); /* * Perform the transformation. 
*/ public void run(IProgressMonitor monitor) throws ProvisionException { SubMonitor progress = SubMonitor.convert(monitor, 4); // ensure all the right parameters are set validate(); // figure out which IUs we need to process collectIUs(progress.newChild(1)); // create the operands from the list of IUs InstallableUnitOperand[] operands = new InstallableUnitOperand[processedIUs.size()]; int i = 0; for (Iterator iter = processedIUs.iterator(); iter.hasNext();) operands[i++] = new InstallableUnitOperand(null, (IInstallableUnit) iter.next()); // ensure the artifact repo will be consulted by loading it IArtifactRepositoryManager artifactRepositoryManager = Activator.getArtifactRepositoryManager(); for (Iterator iter = sourceArtifactRepositories.iterator(); iter.hasNext();) { artifactRepositoryManager.loadRepository((URI) iter.next(), progress.newChild(1)); } + // do a create here to ensure that we don't default to a #load later and grab a repo which is the wrong type + // e.g. extension location type because a plugins/ directory exists. + try { + artifactRepositoryManager.createRepository(new Path(destinationArtifactRepository).toFile().toURI(), "Runnable repository.", IArtifactRepositoryManager.TYPE_SIMPLE_REPOSITORY, null); + } catch (ProvisionException e) { + // ignore... 
perhaps one already exists and we will just load it later + } // call the engine with only the "collect" phase so all we do is download IProfile profile = createProfile(); ProvisioningContext context = new ProvisioningContext(); PhaseSet phaseSet = new PhaseSet(new Phase[] {new Collect(100)}) {}; Engine engine = (Engine) ServiceHelper.getService(Activator.getBundleContext(), Engine.SERVICE_NAME); if (engine == null) throw new ProvisionException("Unable to acquire engine service."); engine.perform(profile, phaseSet, operands, context, progress.newChild(1)); // publish the metadata to a destination - if requested publishMetadata(progress.newChild(1)); // cleanup by removing the temporary profile removeProfile(profile); } /* * Figure out exactly which IUs we have to process. */ private void collectIUs(IProgressMonitor monitor) throws ProvisionException { // if the user told us exactly which IUs to process, then just set it and return. if (sourceIUs != null && !sourceIUs.isEmpty()) { processedIUs = sourceIUs; return; } // get all IUs from the repos if (sourceMetadataRepositories == null || sourceMetadataRepositories.isEmpty()) throw new ProvisionException("Need to specify either a source metadata repository or a valid list of IUs."); for (Iterator iter = sourceMetadataRepositories.iterator(); iter.hasNext();) { processedIUs.addAll(getAllIUs((URI) iter.next(), monitor).toCollection()); } } /* * If there is a destination metadata repository set, then add all our transformed * IUs to it. 
*/ private void publishMetadata(IProgressMonitor monitor) throws ProvisionException { // publishing the metadata is optional if (destinationMetadataRepository == null) return; URI location = new File(destinationMetadataRepository).toURI(); IMetadataRepositoryManager manager = Activator.getMetadataRepositoryManager(); IMetadataRepository repository; try { repository = manager.createRepository(location, location + " - metadata", IMetadataRepositoryManager.TYPE_SIMPLE_REPOSITORY, null); } catch (ProvisionException e) { repository = manager.loadRepository(location, monitor); } repository.addInstallableUnits((IInstallableUnit[]) processedIUs.toArray(new IInstallableUnit[processedIUs.size()])); } /* * Return a collector over all the IUs contained in the given repository. */ private Collector getAllIUs(URI location, IProgressMonitor monitor) throws ProvisionException { SubMonitor progress = SubMonitor.convert(monitor, 2); IMetadataRepositoryManager manager = Activator.getMetadataRepositoryManager(); IMetadataRepository repository = manager.loadRepository(location, progress.newChild(1)); Collector result = new Collector(); repository.query(InstallableUnitQuery.ANY, result, progress.newChild(1)).iterator(); return result; } /* * Remove the given profile from the profile registry. */ private void removeProfile(IProfile profile) throws ProvisionException { IProfileRegistry registry = Activator.getProfileRegistry(); registry.removeProfile(profile.getProfileId()); } /* * Create and return a new profile. 
*/ private IProfile createProfile() throws ProvisionException { Map properties = new Properties(); properties.put(IProfile.PROP_CACHE, destinationArtifactRepository); properties.put(IProfile.PROP_INSTALL_FOLDER, destinationArtifactRepository); IProfileRegistry registry = Activator.getProfileRegistry(); return registry.addProfile(System.currentTimeMillis() + "-" + Math.random(), properties); } /* (non-Javadoc) * @see org.eclipse.equinox.app.IApplication#start(org.eclipse.equinox.app.IApplicationContext) */ public Object start(IApplicationContext context) throws Exception { String[] args = (String[]) context.getArguments().get(IApplicationContext.APPLICATION_ARGS); processCommandLineArgs(args); // perform the transformation run(null); return IApplication.EXIT_OK; } /* * Iterate over the command-line arguments and prepare the transformer for processing. */ private void processCommandLineArgs(String[] args) { if (args == null) return; for (int i = 0; i < args.length; i++) { String option = args[i]; if (i == args.length - 1 || args[i + 1].startsWith("-")) //$NON-NLS-1$ continue; String arg = args[++i]; if (option.equalsIgnoreCase("-source")) { //$NON-NLS-1$ addSourceArtifactRepository(arg); addSourceMetadataRepository(arg); } if (option.equalsIgnoreCase("-destination")) { //$NON-NLS-1$ setDestinationArtifactRepository(arg); setDestinationMetadataRepository(arg); } } } /* * Ensure all mandatory parameters have been set. Throw an exception if there * are any missing. 
*/ private void validate() throws ProvisionException { if (sourceArtifactRepositories == null || sourceArtifactRepositories.isEmpty()) throw new ProvisionException("Need to set the source artifact repository location."); if (sourceMetadataRepositories == null && sourceIUs == null) throw new ProvisionException("Need to set the source metadata repository location or set a list of IUs to process."); if (destinationArtifactRepository == null) throw new ProvisionException("Need to set the destination artifact repository location."); } /* (non-Javadoc) * @see org.eclipse.equinox.app.IApplication#stop() */ public void stop() { // nothing to do } /* * Set the location of the metadata repository. */ public void addSourceMetadataRepository(String location) { URI uri = Activator.getURI(location); if (uri != null) sourceMetadataRepositories.add(uri); } /* * Add the given location as a metadata repository. */ public void addSourceMetadataRepository(URI location) { if (location != null) sourceMetadataRepositories.add(location); } /* * Get the list of source metadata repositories for this transformer. */ public List getSourceMetadataRepositories() { return sourceMetadataRepositories; } /* * Set the location of the artifact repository. */ public void addSourceArtifactRepository(String location) { URI uri = Activator.getURI(location); if (uri != null) sourceArtifactRepositories.add(uri); } /* * Add the given location as an artifact repository. */ public void addSourceArtifactRepository(URI location) { if (location != null) sourceArtifactRepositories.add(location); } /* * Set the destination location for the artifacts. */ public void setDestinationArtifactRepository(String location) { destinationArtifactRepository = new Path(location).toOSString(); } /* * Set the destination location for the metadata if the user wishes to * copy/publish the metadata. 
*/ public void setDestinationMetadataRepository(String location) { destinationMetadataRepository = new Path(location).toOSString(); } /* * Set the list of installable units that we should process. Should use only one * of either this list or the source metadata repository. */ public void setSourceIUs(List ius) { sourceIUs = ius; } }
true
true
public void run(IProgressMonitor monitor) throws ProvisionException { SubMonitor progress = SubMonitor.convert(monitor, 4); // ensure all the right parameters are set validate(); // figure out which IUs we need to process collectIUs(progress.newChild(1)); // create the operands from the list of IUs InstallableUnitOperand[] operands = new InstallableUnitOperand[processedIUs.size()]; int i = 0; for (Iterator iter = processedIUs.iterator(); iter.hasNext();) operands[i++] = new InstallableUnitOperand(null, (IInstallableUnit) iter.next()); // ensure the artifact repo will be consulted by loading it IArtifactRepositoryManager artifactRepositoryManager = Activator.getArtifactRepositoryManager(); for (Iterator iter = sourceArtifactRepositories.iterator(); iter.hasNext();) { artifactRepositoryManager.loadRepository((URI) iter.next(), progress.newChild(1)); } // call the engine with only the "collect" phase so all we do is download IProfile profile = createProfile(); ProvisioningContext context = new ProvisioningContext(); PhaseSet phaseSet = new PhaseSet(new Phase[] {new Collect(100)}) {}; Engine engine = (Engine) ServiceHelper.getService(Activator.getBundleContext(), Engine.SERVICE_NAME); if (engine == null) throw new ProvisionException("Unable to acquire engine service."); engine.perform(profile, phaseSet, operands, context, progress.newChild(1)); // publish the metadata to a destination - if requested publishMetadata(progress.newChild(1)); // cleanup by removing the temporary profile removeProfile(profile); }
public void run(IProgressMonitor monitor) throws ProvisionException { SubMonitor progress = SubMonitor.convert(monitor, 4); // ensure all the right parameters are set validate(); // figure out which IUs we need to process collectIUs(progress.newChild(1)); // create the operands from the list of IUs InstallableUnitOperand[] operands = new InstallableUnitOperand[processedIUs.size()]; int i = 0; for (Iterator iter = processedIUs.iterator(); iter.hasNext();) operands[i++] = new InstallableUnitOperand(null, (IInstallableUnit) iter.next()); // ensure the artifact repo will be consulted by loading it IArtifactRepositoryManager artifactRepositoryManager = Activator.getArtifactRepositoryManager(); for (Iterator iter = sourceArtifactRepositories.iterator(); iter.hasNext();) { artifactRepositoryManager.loadRepository((URI) iter.next(), progress.newChild(1)); } // do a create here to ensure that we don't default to a #load later and grab a repo which is the wrong type // e.g. extension location type because a plugins/ directory exists. try { artifactRepositoryManager.createRepository(new Path(destinationArtifactRepository).toFile().toURI(), "Runnable repository.", IArtifactRepositoryManager.TYPE_SIMPLE_REPOSITORY, null); } catch (ProvisionException e) { // ignore... 
perhaps one already exists and we will just load it later } // call the engine with only the "collect" phase so all we do is download IProfile profile = createProfile(); ProvisioningContext context = new ProvisioningContext(); PhaseSet phaseSet = new PhaseSet(new Phase[] {new Collect(100)}) {}; Engine engine = (Engine) ServiceHelper.getService(Activator.getBundleContext(), Engine.SERVICE_NAME); if (engine == null) throw new ProvisionException("Unable to acquire engine service."); engine.perform(profile, phaseSet, operands, context, progress.newChild(1)); // publish the metadata to a destination - if requested publishMetadata(progress.newChild(1)); // cleanup by removing the temporary profile removeProfile(profile); }
diff --git a/src/org/geometerplus/fbreader/bookmodel/BookReader.java b/src/org/geometerplus/fbreader/bookmodel/BookReader.java index 42643071..b7b986ea 100644 --- a/src/org/geometerplus/fbreader/bookmodel/BookReader.java +++ b/src/org/geometerplus/fbreader/bookmodel/BookReader.java @@ -1,421 +1,421 @@ /* * Copyright (C) 2007-2012 Geometer Plus <[email protected]> * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA * 02110-1301, USA. 
*/ package org.geometerplus.fbreader.bookmodel; import org.geometerplus.zlibrary.core.util.*; import java.nio.ByteBuffer; import java.nio.CharBuffer; import java.nio.charset.CharsetDecoder; import org.geometerplus.zlibrary.core.image.ZLImage; import org.geometerplus.zlibrary.text.model.*; public class BookReader { public final BookModel Model; private ZLTextWritableModel myCurrentTextModel = null; private boolean myTextParagraphExists = false; private boolean myTextParagraphIsNonEmpty = false; private char[] myTextBuffer = new char[4096]; private int myTextBufferLength; private StringBuilder myContentsBuffer = new StringBuilder(); private byte[] myKindStack = new byte[20]; private int myKindStackSize; private byte myHyperlinkKind; private String myHyperlinkReference = ""; private boolean myInsideTitle = false; private boolean mySectionContainsRegularContents = false; private TOCTree myCurrentContentsTree; private CharsetDecoder myByteDecoder; public BookReader(BookModel model) { Model = model; myCurrentContentsTree = model.TOCTree; } public final void setByteDecoder(CharsetDecoder decoder) { myByteDecoder = decoder; } private final void flushTextBufferToParagraph() { if (myTextBufferLength > 0) { myCurrentTextModel.addText(myTextBuffer, 0, myTextBufferLength); myTextBufferLength = 0; if (myByteDecoder != null) { myByteDecoder.reset(); } } } public final void addControl(byte kind, boolean start) { if (myTextParagraphExists) { flushTextBufferToParagraph(); myCurrentTextModel.addControl(kind, start); } if (!start && myHyperlinkReference.length() != 0 && kind == myHyperlinkKind) { myHyperlinkReference = ""; } } /* public final void addControl(ZLTextForcedControlEntry entry) { if (myTextParagraphExists) { flushTextBufferToParagraph(); myCurrentTextModel.addControl(entry); } } */ public final void pushKind(byte kind) { byte[] stack = myKindStack; if (stack.length == myKindStackSize) { stack = ZLArrayUtils.createCopy(stack, myKindStackSize, myKindStackSize << 1); 
myKindStack = stack; } stack[myKindStackSize++] = kind; } public final boolean popKind() { if (myKindStackSize != 0) { --myKindStackSize; return true; } return false; } public final void beginParagraph() { beginParagraph(ZLTextParagraph.Kind.TEXT_PARAGRAPH); } public final void beginParagraph(byte kind) { endParagraph(); final ZLTextWritableModel textModel = myCurrentTextModel; if (textModel != null) { textModel.createParagraph(kind); final byte[] stack = myKindStack; final int size = myKindStackSize; for (int i = 0; i < size; ++i) { textModel.addControl(stack[i], true); } if (myHyperlinkReference.length() != 0) { textModel.addHyperlinkControl(myHyperlinkKind, hyperlinkType(myHyperlinkKind), myHyperlinkReference); } myTextParagraphExists = true; } } public final void endParagraph() { if (myTextParagraphExists) { flushTextBufferToParagraph(); myTextParagraphExists = false; myTextParagraphIsNonEmpty = false; } } private final void insertEndParagraph(byte kind) { final ZLTextWritableModel textModel = myCurrentTextModel; if (textModel != null && mySectionContainsRegularContents) { int size = textModel.getParagraphsNumber(); if (size > 0 && textModel.getParagraph(size - 1).getKind() != kind) { textModel.createParagraph(kind); mySectionContainsRegularContents = false; } } } public final void insertEndOfSectionParagraph() { insertEndParagraph(ZLTextParagraph.Kind.END_OF_SECTION_PARAGRAPH); } /* public final void insertEndOfTextParagraph() { insertEndParagraph(ZLTextParagraph.Kind.END_OF_TEXT_PARAGRAPH); } */ public final void unsetCurrentTextModel() { if (myCurrentTextModel != null) { myCurrentTextModel.stopReading(); } myCurrentTextModel = null; } public final void enterTitle() { myInsideTitle = true; } public final void exitTitle() { myInsideTitle = false; } public final void setMainTextModel() { if ((myCurrentTextModel != null) && (myCurrentTextModel != Model.BookTextModel)) { myCurrentTextModel.stopReading(); } myCurrentTextModel = 
(ZLTextWritableModel)Model.BookTextModel; } public final void setFootnoteTextModel(String id) { if ((myCurrentTextModel != null) && (myCurrentTextModel != Model.BookTextModel)) { myCurrentTextModel.stopReading(); } myCurrentTextModel = (ZLTextWritableModel)Model.getFootnoteModel(id); } public final void addData(char[] data) { addData(data, 0, data.length, false); } public final void addData(char[] data, int offset, int length, boolean direct) { if (!myTextParagraphExists || length == 0) { return; } if (!myInsideTitle && !mySectionContainsRegularContents) { while (length > 0 && Character.isWhitespace(data[offset])) { --length; ++offset; } if (length == 0) { return; } } myTextParagraphIsNonEmpty = true; if (direct && (myTextBufferLength == 0) && !myInsideTitle) { myCurrentTextModel.addText(data, offset, length); } else { final int oldLength = myTextBufferLength; final int newLength = oldLength + length; if (myTextBuffer.length < newLength) { myTextBuffer = ZLArrayUtils.createCopy(myTextBuffer, oldLength, newLength); } System.arraycopy(data, offset, myTextBuffer, oldLength, length); myTextBufferLength = newLength; if (myInsideTitle) { addContentsData(myTextBuffer, oldLength, length); } } if (!myInsideTitle) { mySectionContainsRegularContents = true; } } private byte[] myUnderflowByteBuffer = new byte[4]; private int myUnderflowLength; public final void addByteData(byte[] data, int start, int length) { - if (!myTextParagraphExists || (length == 0)) { + if (!myTextParagraphExists || length == 0) { return; } myTextParagraphIsNonEmpty = true; final int oldLength = myTextBufferLength; if (myTextBuffer.length < oldLength + length) { myTextBuffer = ZLArrayUtils.createCopy(myTextBuffer, oldLength, oldLength + length); } final CharBuffer cb = CharBuffer.wrap(myTextBuffer, myTextBufferLength, length); if (myUnderflowLength > 0) { int l = myUnderflowLength; - while (length-- > 0) { + while (length-- > 0 && l < 4) { myUnderflowByteBuffer[l++] = data[start++]; final ByteBuffer ubb 
= ByteBuffer.wrap(myUnderflowByteBuffer); myByteDecoder.decode(ubb, cb, false); if (cb.position() != oldLength) { myUnderflowLength = 0; break; } } if (length == 0) { myUnderflowLength = l; return; } } ByteBuffer bb = ByteBuffer.wrap(data, start, length); myByteDecoder.decode(bb, cb, false); myTextBufferLength = cb.position(); int rem = bb.remaining(); if (rem > 0) { for (int i = 0, j = start + length - rem; i < rem;) { myUnderflowByteBuffer[i++] = data[j++]; } myUnderflowLength = rem; } if (myInsideTitle) { addContentsData(myTextBuffer, oldLength, myTextBufferLength - oldLength); } else { mySectionContainsRegularContents = true; } } private static byte hyperlinkType(byte kind) { return (kind == FBTextKind.EXTERNAL_HYPERLINK) ? FBHyperlinkType.EXTERNAL : FBHyperlinkType.INTERNAL; } public final void addHyperlinkControl(byte kind, String label) { if (myTextParagraphExists) { flushTextBufferToParagraph(); myCurrentTextModel.addHyperlinkControl(kind, hyperlinkType(kind), label); } myHyperlinkKind = kind; myHyperlinkReference = label; } public final void addHyperlinkLabel(String label) { final ZLTextWritableModel textModel = myCurrentTextModel; if (textModel != null) { int paragraphNumber = textModel.getParagraphsNumber(); if (myTextParagraphExists) { --paragraphNumber; } Model.addHyperlinkLabel(label, textModel, paragraphNumber); } } public final void addHyperlinkLabel(String label, int paragraphIndex) { Model.addHyperlinkLabel(label, myCurrentTextModel, paragraphIndex); } public final void addContentsData(char[] data) { addContentsData(data, 0, data.length); } public final void addContentsData(char[] data, int offset, int length) { if ((length != 0) && (myCurrentContentsTree != null)) { myContentsBuffer.append(data, offset, length); } } public final boolean hasContentsData() { return myContentsBuffer.length() > 0; } public final void beginContentsParagraph(int referenceNumber) { beginContentsParagraph(Model.BookTextModel, referenceNumber); } public final void 
beginContentsParagraph(ZLTextModel bookTextModel, int referenceNumber) { final ZLTextModel textModel = myCurrentTextModel; if (textModel == bookTextModel) { if (referenceNumber == -1) { referenceNumber = textModel.getParagraphsNumber(); } TOCTree parentTree = myCurrentContentsTree; if (parentTree.Level > 0) { if (myContentsBuffer.length() > 0) { parentTree.setText(myContentsBuffer.toString()); myContentsBuffer.delete(0, myContentsBuffer.length()); } else if (parentTree.getText() == null) { parentTree.setText("..."); } } else { myContentsBuffer.delete(0, myContentsBuffer.length()); } TOCTree tree = new TOCTree(parentTree); tree.setReference(myCurrentTextModel, referenceNumber); myCurrentContentsTree = tree; } } public final void endContentsParagraph() { final TOCTree tree = myCurrentContentsTree; if (tree.Level == 0) { myContentsBuffer.delete(0, myContentsBuffer.length()); return; } if (myContentsBuffer.length() > 0) { tree.setText(myContentsBuffer.toString()); myContentsBuffer.delete(0, myContentsBuffer.length()); } else if (tree.getText() == null) { tree.setText("..."); } myCurrentContentsTree = tree.Parent; } public final void setReference(int contentsParagraphNumber, int referenceNumber) { setReference(contentsParagraphNumber, myCurrentTextModel, referenceNumber); } public final void setReference(int contentsParagraphNumber, ZLTextWritableModel textModel, int referenceNumber) { final TOCTree contentsTree = Model.TOCTree; if (contentsParagraphNumber < contentsTree.getSize()) { contentsTree.getTreeByParagraphNumber(contentsParagraphNumber).setReference( textModel, referenceNumber ); } } public final boolean paragraphIsOpen() { return myTextParagraphExists; } public boolean paragraphIsNonEmpty() { return myTextParagraphIsNonEmpty; } public final boolean contentsParagraphIsOpen() { return myCurrentContentsTree.Level > 0; } public final void beginContentsParagraph() { beginContentsParagraph(-1); } public final void addImageReference(String ref, boolean isCover) { 
addImageReference(ref, (short)0, isCover); } public final void addImageReference(String ref, short vOffset, boolean isCover) { final ZLTextWritableModel textModel = myCurrentTextModel; if (textModel != null) { mySectionContainsRegularContents = true; if (myTextParagraphExists) { flushTextBufferToParagraph(); textModel.addImage(ref, vOffset, isCover); } else { beginParagraph(ZLTextParagraph.Kind.TEXT_PARAGRAPH); textModel.addControl(FBTextKind.IMAGE, true); textModel.addImage(ref, vOffset, isCover); textModel.addControl(FBTextKind.IMAGE, false); endParagraph(); } } } public final void addImage(String id, ZLImage image) { Model.addImage(id, image); } public final void addFixedHSpace(short length) { if (myTextParagraphExists) { myCurrentTextModel.addFixedHSpace(length); } } }
false
true
public final void addByteData(byte[] data, int start, int length) { if (!myTextParagraphExists || (length == 0)) { return; } myTextParagraphIsNonEmpty = true; final int oldLength = myTextBufferLength; if (myTextBuffer.length < oldLength + length) { myTextBuffer = ZLArrayUtils.createCopy(myTextBuffer, oldLength, oldLength + length); } final CharBuffer cb = CharBuffer.wrap(myTextBuffer, myTextBufferLength, length); if (myUnderflowLength > 0) { int l = myUnderflowLength; while (length-- > 0) { myUnderflowByteBuffer[l++] = data[start++]; final ByteBuffer ubb = ByteBuffer.wrap(myUnderflowByteBuffer); myByteDecoder.decode(ubb, cb, false); if (cb.position() != oldLength) { myUnderflowLength = 0; break; } } if (length == 0) { myUnderflowLength = l; return; } } ByteBuffer bb = ByteBuffer.wrap(data, start, length); myByteDecoder.decode(bb, cb, false); myTextBufferLength = cb.position(); int rem = bb.remaining(); if (rem > 0) { for (int i = 0, j = start + length - rem; i < rem;) { myUnderflowByteBuffer[i++] = data[j++]; } myUnderflowLength = rem; } if (myInsideTitle) { addContentsData(myTextBuffer, oldLength, myTextBufferLength - oldLength); } else { mySectionContainsRegularContents = true; } }
public final void addByteData(byte[] data, int start, int length) { if (!myTextParagraphExists || length == 0) { return; } myTextParagraphIsNonEmpty = true; final int oldLength = myTextBufferLength; if (myTextBuffer.length < oldLength + length) { myTextBuffer = ZLArrayUtils.createCopy(myTextBuffer, oldLength, oldLength + length); } final CharBuffer cb = CharBuffer.wrap(myTextBuffer, myTextBufferLength, length); if (myUnderflowLength > 0) { int l = myUnderflowLength; while (length-- > 0 && l < 4) { myUnderflowByteBuffer[l++] = data[start++]; final ByteBuffer ubb = ByteBuffer.wrap(myUnderflowByteBuffer); myByteDecoder.decode(ubb, cb, false); if (cb.position() != oldLength) { myUnderflowLength = 0; break; } } if (length == 0) { myUnderflowLength = l; return; } } ByteBuffer bb = ByteBuffer.wrap(data, start, length); myByteDecoder.decode(bb, cb, false); myTextBufferLength = cb.position(); int rem = bb.remaining(); if (rem > 0) { for (int i = 0, j = start + length - rem; i < rem;) { myUnderflowByteBuffer[i++] = data[j++]; } myUnderflowLength = rem; } if (myInsideTitle) { addContentsData(myTextBuffer, oldLength, myTextBufferLength - oldLength); } else { mySectionContainsRegularContents = true; } }
diff --git a/hibernate-core/src/main/java/org/hibernate/dialect/HSQLDialect.java b/hibernate-core/src/main/java/org/hibernate/dialect/HSQLDialect.java index e8c9b8ce4b..b8b78b80c0 100644 --- a/hibernate-core/src/main/java/org/hibernate/dialect/HSQLDialect.java +++ b/hibernate-core/src/main/java/org/hibernate/dialect/HSQLDialect.java @@ -1,715 +1,715 @@ /* * Hibernate, Relational Persistence for Idiomatic Java * * Copyright (c) 2010, Red Hat Inc. or third-party contributors as * indicated by the @author tags or express copyright attribution * statements applied by the authors. All third-party contributions are * distributed under license by Red Hat Inc. * * This copyrighted material is made available to anyone wishing to use, modify, * copy, or redistribute it subject to the terms and conditions of the GNU * Lesser General Public License, as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License * for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this distribution; if not, write to: * Free Software Foundation, Inc. 
* 51 Franklin Street, Fifth Floor * Boston, MA 02110-1301 USA */ package org.hibernate.dialect; import java.io.Serializable; import java.sql.SQLException; import java.sql.Types; import org.jboss.logging.Logger; import org.hibernate.JDBCException; import org.hibernate.LockMode; import org.hibernate.StaleObjectStateException; import org.hibernate.cfg.Environment; import org.hibernate.dialect.function.AvgWithArgumentCastFunction; import org.hibernate.dialect.function.NoArgSQLFunction; import org.hibernate.dialect.function.SQLFunctionTemplate; import org.hibernate.dialect.function.StandardSQLFunction; import org.hibernate.dialect.function.VarArgsSQLFunction; import org.hibernate.dialect.lock.LockingStrategy; import org.hibernate.dialect.lock.OptimisticForceIncrementLockingStrategy; import org.hibernate.dialect.lock.OptimisticLockingStrategy; import org.hibernate.dialect.lock.PessimisticForceIncrementLockingStrategy; import org.hibernate.dialect.lock.PessimisticReadSelectLockingStrategy; import org.hibernate.dialect.lock.PessimisticWriteSelectLockingStrategy; import org.hibernate.dialect.lock.SelectLockingStrategy; import org.hibernate.engine.spi.SessionImplementor; import org.hibernate.exception.spi.TemplatedViolatedConstraintNameExtracter; import org.hibernate.exception.spi.ViolatedConstraintNameExtracter; import org.hibernate.internal.CoreMessageLogger; import org.hibernate.internal.util.JdbcExceptionHelper; import org.hibernate.internal.util.ReflectHelper; import org.hibernate.persister.entity.Lockable; import org.hibernate.type.StandardBasicTypes; /** * An SQL dialect compatible with HSQLDB (HyperSQL). * <p/> * Note this version supports HSQLDB version 1.8 and higher, only. * <p/> * Enhancements to version 3.5.0 GA to provide basic support for both HSQLDB 1.8.x and 2.x * Does not works with Hibernate 3.2 - 3.4 without alteration. 
* * @author Christoph Sturm * @author Phillip Baird * @author Fred Toussi */ public class HSQLDialect extends Dialect { private static final CoreMessageLogger LOG = Logger.getMessageLogger(CoreMessageLogger.class, HSQLDialect.class.getName()); /** * version is 18 for 1.8 or 20 for 2.0 */ private int hsqldbVersion = 18; public HSQLDialect() { super(); try { Class props = ReflectHelper.classForName( "org.hsqldb.persist.HsqlDatabaseProperties" ); String versionString = (String) props.getDeclaredField( "THIS_VERSION" ).get( null ); hsqldbVersion = Integer.parseInt( versionString.substring( 0, 1 ) ) * 10; hsqldbVersion += Integer.parseInt( versionString.substring( 2, 3 ) ); } catch ( Throwable e ) { // must be a very old version } registerColumnType( Types.BIGINT, "bigint" ); registerColumnType( Types.BINARY, "binary($l)" ); registerColumnType( Types.BIT, "bit" ); registerColumnType( Types.BOOLEAN, "boolean" ); registerColumnType( Types.CHAR, "char($l)" ); registerColumnType( Types.DATE, "date" ); registerColumnType( Types.DECIMAL, "decimal($p,$s)" ); registerColumnType( Types.DOUBLE, "double" ); registerColumnType( Types.FLOAT, "float" ); registerColumnType( Types.INTEGER, "integer" ); registerColumnType( Types.LONGVARBINARY, "longvarbinary" ); registerColumnType( Types.LONGVARCHAR, "longvarchar" ); registerColumnType( Types.SMALLINT, "smallint" ); registerColumnType( Types.TINYINT, "tinyint" ); registerColumnType( Types.TIME, "time" ); registerColumnType( Types.TIMESTAMP, "timestamp" ); registerColumnType( Types.VARCHAR, "varchar($l)" ); registerColumnType( Types.VARBINARY, "varbinary($l)" ); if ( hsqldbVersion < 20 ) { registerColumnType( Types.NUMERIC, "numeric" ); } else { registerColumnType( Types.NUMERIC, "numeric($p,$s)" ); } //HSQL has no Blob/Clob support .... but just put these here for now! 
if ( hsqldbVersion < 20 ) { registerColumnType( Types.BLOB, "longvarbinary" ); registerColumnType( Types.CLOB, "longvarchar" ); } else { - registerColumnType( Types.BLOB, "blob" ); - registerColumnType( Types.CLOB, "clob" ); + registerColumnType( Types.BLOB, "blob($l)" ); + registerColumnType( Types.CLOB, "clob($l)" ); } // aggregate functions registerFunction( "avg", new AvgWithArgumentCastFunction( "double" ) ); // string functions registerFunction( "ascii", new StandardSQLFunction( "ascii", StandardBasicTypes.INTEGER ) ); registerFunction( "char", new StandardSQLFunction( "char", StandardBasicTypes.CHARACTER ) ); registerFunction( "lower", new StandardSQLFunction( "lower" ) ); registerFunction( "upper", new StandardSQLFunction( "upper" ) ); registerFunction( "lcase", new StandardSQLFunction( "lcase" ) ); registerFunction( "ucase", new StandardSQLFunction( "ucase" ) ); registerFunction( "soundex", new StandardSQLFunction( "soundex", StandardBasicTypes.STRING ) ); registerFunction( "ltrim", new StandardSQLFunction( "ltrim" ) ); registerFunction( "rtrim", new StandardSQLFunction( "rtrim" ) ); registerFunction( "reverse", new StandardSQLFunction( "reverse" ) ); registerFunction( "space", new StandardSQLFunction( "space", StandardBasicTypes.STRING ) ); registerFunction( "str", new SQLFunctionTemplate( StandardBasicTypes.STRING, "cast(?1 as varchar(256))" ) ); registerFunction( "to_char", new StandardSQLFunction( "to_char" ) ); registerFunction( "rawtohex", new StandardSQLFunction( "rawtohex" ) ); registerFunction( "hextoraw", new StandardSQLFunction( "hextoraw" ) ); // system functions registerFunction( "user", new NoArgSQLFunction( "user", StandardBasicTypes.STRING ) ); registerFunction( "database", new NoArgSQLFunction( "database", StandardBasicTypes.STRING ) ); // datetime functions if ( hsqldbVersion < 20 ) { registerFunction( "sysdate", new NoArgSQLFunction( "sysdate", StandardBasicTypes.DATE, false ) ); } else { registerFunction( "sysdate", new 
NoArgSQLFunction( "sysdate", StandardBasicTypes.TIMESTAMP, false ) ); } registerFunction( "current_date", new NoArgSQLFunction( "current_date", StandardBasicTypes.DATE, false ) ); registerFunction( "curdate", new NoArgSQLFunction( "curdate", StandardBasicTypes.DATE ) ); registerFunction( "current_timestamp", new NoArgSQLFunction( "current_timestamp", StandardBasicTypes.TIMESTAMP, false ) ); registerFunction( "now", new NoArgSQLFunction( "now", StandardBasicTypes.TIMESTAMP ) ); registerFunction( "current_time", new NoArgSQLFunction( "current_time", StandardBasicTypes.TIME, false ) ); registerFunction( "curtime", new NoArgSQLFunction( "curtime", StandardBasicTypes.TIME ) ); registerFunction( "day", new StandardSQLFunction( "day", StandardBasicTypes.INTEGER ) ); registerFunction( "dayofweek", new StandardSQLFunction( "dayofweek", StandardBasicTypes.INTEGER ) ); registerFunction( "dayofyear", new StandardSQLFunction( "dayofyear", StandardBasicTypes.INTEGER ) ); registerFunction( "dayofmonth", new StandardSQLFunction( "dayofmonth", StandardBasicTypes.INTEGER ) ); registerFunction( "month", new StandardSQLFunction( "month", StandardBasicTypes.INTEGER ) ); registerFunction( "year", new StandardSQLFunction( "year", StandardBasicTypes.INTEGER ) ); registerFunction( "week", new StandardSQLFunction( "week", StandardBasicTypes.INTEGER ) ); registerFunction( "quarter", new StandardSQLFunction( "quarter", StandardBasicTypes.INTEGER ) ); registerFunction( "hour", new StandardSQLFunction( "hour", StandardBasicTypes.INTEGER ) ); registerFunction( "minute", new StandardSQLFunction( "minute", StandardBasicTypes.INTEGER ) ); registerFunction( "second", new SQLFunctionTemplate( StandardBasicTypes.INTEGER, "cast(second(?1) as int)" ) ); registerFunction( "dayname", new StandardSQLFunction( "dayname", StandardBasicTypes.STRING ) ); registerFunction( "monthname", new StandardSQLFunction( "monthname", StandardBasicTypes.STRING ) ); // numeric functions registerFunction( "abs", new 
StandardSQLFunction( "abs" ) ); registerFunction( "sign", new StandardSQLFunction( "sign", StandardBasicTypes.INTEGER ) ); registerFunction( "acos", new StandardSQLFunction( "acos", StandardBasicTypes.DOUBLE ) ); registerFunction( "asin", new StandardSQLFunction( "asin", StandardBasicTypes.DOUBLE ) ); registerFunction( "atan", new StandardSQLFunction( "atan", StandardBasicTypes.DOUBLE ) ); registerFunction( "cos", new StandardSQLFunction( "cos", StandardBasicTypes.DOUBLE ) ); registerFunction( "cot", new StandardSQLFunction( "cot", StandardBasicTypes.DOUBLE ) ); registerFunction( "exp", new StandardSQLFunction( "exp", StandardBasicTypes.DOUBLE ) ); registerFunction( "log", new StandardSQLFunction( "log", StandardBasicTypes.DOUBLE ) ); registerFunction( "log10", new StandardSQLFunction( "log10", StandardBasicTypes.DOUBLE ) ); registerFunction( "sin", new StandardSQLFunction( "sin", StandardBasicTypes.DOUBLE ) ); registerFunction( "sqrt", new StandardSQLFunction( "sqrt", StandardBasicTypes.DOUBLE ) ); registerFunction( "tan", new StandardSQLFunction( "tan", StandardBasicTypes.DOUBLE ) ); registerFunction( "pi", new NoArgSQLFunction( "pi", StandardBasicTypes.DOUBLE ) ); registerFunction( "rand", new StandardSQLFunction( "rand", StandardBasicTypes.FLOAT ) ); registerFunction( "radians", new StandardSQLFunction( "radians", StandardBasicTypes.DOUBLE ) ); registerFunction( "degrees", new StandardSQLFunction( "degrees", StandardBasicTypes.DOUBLE ) ); registerFunction( "round", new StandardSQLFunction( "round" ) ); registerFunction( "roundmagic", new StandardSQLFunction( "roundmagic" ) ); registerFunction( "truncate", new StandardSQLFunction( "truncate" ) ); registerFunction( "ceiling", new StandardSQLFunction( "ceiling" ) ); registerFunction( "floor", new StandardSQLFunction( "floor" ) ); // special functions // from v. 
2.2.0 ROWNUM() is supported in all modes as the equivalent of Oracle ROWNUM if ( hsqldbVersion > 21 ) { registerFunction("rownum", new NoArgSQLFunction("rownum", StandardBasicTypes.INTEGER)); } // function templates registerFunction( "concat", new VarArgsSQLFunction( StandardBasicTypes.STRING, "(", "||", ")" ) ); getDefaultProperties().setProperty( Environment.STATEMENT_BATCH_SIZE, DEFAULT_BATCH_SIZE ); } public String getAddColumnString() { return "add column"; } public boolean supportsIdentityColumns() { return true; } public String getIdentityColumnString() { return "generated by default as identity (start with 1)"; //not null is implicit } public String getIdentitySelectString() { return "call identity()"; } public String getIdentityInsertString() { return hsqldbVersion < 20 ? "null" : "default"; } public boolean supportsLockTimeouts() { return false; } public String getForUpdateString() { return ""; } public boolean supportsUnique() { return false; } public boolean supportsLimit() { return true; } public String getLimitString(String sql, boolean hasOffset) { if ( hsqldbVersion < 20 ) { return new StringBuilder( sql.length() + 10 ) .append( sql ) .insert( sql.toLowerCase().indexOf( "select" ) + 6, hasOffset ? " limit ? ?" : " top ?" ) .toString(); } else { return new StringBuilder( sql.length() + 20 ) .append( sql ) .append( hasOffset ? " offset ? limit ?" : " limit ?" 
) .toString(); } } public boolean bindLimitParametersFirst() { return hsqldbVersion < 20; } public boolean supportsIfExistsAfterTableName() { return true; } public boolean supportsColumnCheck() { return hsqldbVersion >= 20; } public boolean supportsSequences() { return true; } public boolean supportsPooledSequences() { return true; } protected String getCreateSequenceString(String sequenceName) { return "create sequence " + sequenceName; } protected String getDropSequenceString(String sequenceName) { return "drop sequence " + sequenceName; } public String getSelectSequenceNextValString(String sequenceName) { return "next value for " + sequenceName; } public String getSequenceNextValString(String sequenceName) { return "call next value for " + sequenceName; } public String getQuerySequencesString() { // this assumes schema support, which is present in 1.8.0 and later... return "select sequence_name from information_schema.system_sequences"; } public ViolatedConstraintNameExtracter getViolatedConstraintNameExtracter() { return hsqldbVersion < 20 ? EXTRACTER_18 : EXTRACTER_20; } private static ViolatedConstraintNameExtracter EXTRACTER_18 = new TemplatedViolatedConstraintNameExtracter() { /** * Extract the name of the violated constraint from the given SQLException. * * @param sqle The exception that was the result of the constraint violation. * @return The extracted constraint name. 
*/ public String extractConstraintName(SQLException sqle) { String constraintName = null; int errorCode = JdbcExceptionHelper.extractErrorCode( sqle ); if ( errorCode == -8 ) { constraintName = extractUsingTemplate( "Integrity constraint violation ", " table:", sqle.getMessage() ); } else if ( errorCode == -9 ) { constraintName = extractUsingTemplate( "Violation of unique index: ", " in statement [", sqle.getMessage() ); } else if ( errorCode == -104 ) { constraintName = extractUsingTemplate( "Unique constraint violation: ", " in statement [", sqle.getMessage() ); } else if ( errorCode == -177 ) { constraintName = extractUsingTemplate( "Integrity constraint violation - no parent ", " table:", sqle.getMessage() ); } return constraintName; } }; /** * HSQLDB 2.0 messages have changed * messages may be localized - therefore use the common, non-locale element " table: " */ private static ViolatedConstraintNameExtracter EXTRACTER_20 = new TemplatedViolatedConstraintNameExtracter() { public String extractConstraintName(SQLException sqle) { String constraintName = null; int errorCode = JdbcExceptionHelper.extractErrorCode( sqle ); if ( errorCode == -8 ) { constraintName = extractUsingTemplate( "; ", " table: ", sqle.getMessage() ); } else if ( errorCode == -9 ) { constraintName = extractUsingTemplate( "; ", " table: ", sqle.getMessage() ); } else if ( errorCode == -104 ) { constraintName = extractUsingTemplate( "; ", " table: ", sqle.getMessage() ); } else if ( errorCode == -177 ) { constraintName = extractUsingTemplate( "; ", " table: ", sqle.getMessage() ); } return constraintName; } }; public String getSelectClauseNullString(int sqlType) { String literal; switch ( sqlType ) { case Types.LONGVARCHAR: case Types.VARCHAR: case Types.CHAR: literal = "cast(null as varchar(100))"; break; case Types.LONGVARBINARY: case Types.VARBINARY: case Types.BINARY: literal = "cast(null as varbinary(100))"; break; case Types.CLOB: literal = "cast(null as clob)"; break; case Types.BLOB: 
literal = "cast(null as blob)"; break; case Types.DATE: literal = "cast(null as date)"; break; case Types.TIMESTAMP: literal = "cast(null as timestamp)"; break; case Types.BOOLEAN: literal = "cast(null as boolean)"; break; case Types.BIT: literal = "cast(null as bit)"; break; case Types.TIME: literal = "cast(null as time)"; break; default: literal = "cast(null as int)"; } return literal; } public boolean supportsUnionAll() { return true; } // temporary table support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // Hibernate uses this information for temporary tables that it uses for its own operations // therefore the appropriate strategy is taken with different versions of HSQLDB // All versions of HSQLDB support GLOBAL TEMPORARY tables where the table // definition is shared by all users but data is private to the session // HSQLDB 2.0 also supports session-based LOCAL TEMPORARY tables where // the definition and data is private to the session and table declaration // can happen in the middle of a transaction /** * Does this dialect support temporary tables? * * @return True if temp tables are supported; false otherwise. */ public boolean supportsTemporaryTables() { return true; } /** * With HSQLDB 2.0, the table name is qualified with MODULE to assist the drop * statement (in-case there is a global name beginning with HT_) * * @param baseTableName The table name from which to base the temp table name. * * @return The generated temp table name. */ public String generateTemporaryTableName(String baseTableName) { if ( hsqldbVersion < 20 ) { return "HT_" + baseTableName; } else { return "MODULE.HT_" + baseTableName; } } /** * Command used to create a temporary table. * * @return The command used to create a temporary table. 
*/ public String getCreateTemporaryTableString() { if ( hsqldbVersion < 20 ) { return "create global temporary table"; } else { return "declare local temporary table"; } } /** * No fragment is needed if data is not needed beyond commit, otherwise * should add "on commit preserve rows" * * @return Any required postfix. */ public String getCreateTemporaryTablePostfix() { return ""; } /** * Command used to drop a temporary table. * * @return The command used to drop a temporary table. */ public String getDropTemporaryTableString() { return "drop table"; } /** * Different behavior for GLOBAL TEMPORARY (1.8) and LOCAL TEMPORARY (2.0) * <p/> * Possible return values and their meanings:<ul> * <li>{@link Boolean#TRUE} - Unequivocally, perform the temporary table DDL * in isolation.</li> * <li>{@link Boolean#FALSE} - Unequivocally, do <b>not</b> perform the * temporary table DDL in isolation.</li> * <li><i>null</i> - defer to the JDBC driver response in regards to * {@link java.sql.DatabaseMetaData#dataDefinitionCausesTransactionCommit()}</li> * </ul> * * @return see the result matrix above. */ public Boolean performTemporaryTableDDLInIsolation() { if ( hsqldbVersion < 20 ) { return Boolean.TRUE; } else { return Boolean.FALSE; } } /** * Do we need to drop the temporary table after use? * * todo - clarify usage by Hibernate * * Version 1.8 GLOBAL TEMPORARY table definitions persist beyond the end * of the session (by default, data is cleared at commit).<p> * * Version 2.x LOCAL TEMPORARY table definitions do not persist beyond * the end of the session (by default, data is cleared at commit). * * @return True if the table should be dropped. */ public boolean dropTemporaryTableAfterUse() { return true; } // current timestamp support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ /** * HSQLDB 1.8.x requires CALL CURRENT_TIMESTAMP but this should not * be treated as a callable statement. It is equivalent to * "select current_timestamp from dual" in some databases. 
* HSQLDB 2.0 also supports VALUES CURRENT_TIMESTAMP * * @return True if the current timestamp can be retrieved; false otherwise. */ public boolean supportsCurrentTimestampSelection() { return true; } /** * Should the value returned by {@link #getCurrentTimestampSelectString} * be treated as callable. Typically this indicates that JDBC escape * syntax is being used...<p> * * CALL CURRENT_TIMESTAMP is used but this should not * be treated as a callable statement. * * @return True if the {@link #getCurrentTimestampSelectString} return * is callable; false otherwise. */ public boolean isCurrentTimestampSelectStringCallable() { return false; } /** * Retrieve the command used to retrieve the current timestamp from the * database. * * @return The command. */ public String getCurrentTimestampSelectString() { return "call current_timestamp"; } /** * The name of the database-specific SQL function for retrieving the * current timestamp. * * @return The function name. */ public String getCurrentTimestampSQLFunctionName() { // the standard SQL function name is current_timestamp... return "current_timestamp"; } /** * For HSQLDB 2.0, this is a copy of the base class implementation. * For HSQLDB 1.8, only READ_UNCOMMITTED is supported. * * @param lockable The persister for the entity to be locked. * @param lockMode The type of lock to be acquired. * * @return The appropriate locking strategy. 
* * @since 3.2 */ public LockingStrategy getLockingStrategy(Lockable lockable, LockMode lockMode) { if ( lockMode == LockMode.PESSIMISTIC_FORCE_INCREMENT ) { return new PessimisticForceIncrementLockingStrategy( lockable, lockMode ); } else if ( lockMode == LockMode.PESSIMISTIC_WRITE ) { return new PessimisticWriteSelectLockingStrategy( lockable, lockMode ); } else if ( lockMode == LockMode.PESSIMISTIC_READ ) { return new PessimisticReadSelectLockingStrategy( lockable, lockMode ); } else if ( lockMode == LockMode.OPTIMISTIC ) { return new OptimisticLockingStrategy( lockable, lockMode ); } else if ( lockMode == LockMode.OPTIMISTIC_FORCE_INCREMENT ) { return new OptimisticForceIncrementLockingStrategy( lockable, lockMode ); } if ( hsqldbVersion < 20 ) { return new ReadUncommittedLockingStrategy( lockable, lockMode ); } else { return new SelectLockingStrategy( lockable, lockMode ); } } public static class ReadUncommittedLockingStrategy extends SelectLockingStrategy { public ReadUncommittedLockingStrategy(Lockable lockable, LockMode lockMode) { super( lockable, lockMode ); } public void lock(Serializable id, Object version, Object object, int timeout, SessionImplementor session) throws StaleObjectStateException, JDBCException { if ( getLockMode().greaterThan( LockMode.READ ) ) { LOG.hsqldbSupportsOnlyReadCommittedIsolation(); } super.lock( id, version, object, timeout, session ); } } public boolean supportsCommentOn() { return hsqldbVersion >= 20; } // Overridden informational metadata ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @Override public boolean supportsEmptyInList() { return false; } /** * todo - needs usage clarification * * If the SELECT statement is always part of a UNION, then the type of * parameter is resolved by v. 2.0, but not v. 1.8 (assuming the other * SELECT in the UNION has a column reference in the same position and * can be type-resolved). * * On the other hand if the SELECT statement is isolated, all versions of * HSQLDB require casting for "select ? 
from .." to work. * * @return True if select clause parameter must be cast()ed * * @since 3.2 */ public boolean requiresCastingOfParametersInSelectClause() { return true; } /** * For the underlying database, is READ_COMMITTED isolation implemented by * forcing readers to wait for write locks to be released? * * @return True if writers block readers to achieve READ_COMMITTED; false otherwise. */ public boolean doesReadCommittedCauseWritersToBlockReaders() { return hsqldbVersion >= 20; } /** * For the underlying database, is REPEATABLE_READ isolation implemented by * forcing writers to wait for read locks to be released? * * @return True if readers block writers to achieve REPEATABLE_READ; false otherwise. */ public boolean doesRepeatableReadCauseReadersToBlockWriters() { return hsqldbVersion >= 20; } public boolean supportsLobValueChangePropogation() { return false; } public String toBooleanValueString(boolean bool) { return String.valueOf( bool ); } public boolean supportsTupleDistinctCounts() { return false; } @Override public boolean supportsNotNullUnique() { return false; } }
true
true
public HSQLDialect() { super(); try { Class props = ReflectHelper.classForName( "org.hsqldb.persist.HsqlDatabaseProperties" ); String versionString = (String) props.getDeclaredField( "THIS_VERSION" ).get( null ); hsqldbVersion = Integer.parseInt( versionString.substring( 0, 1 ) ) * 10; hsqldbVersion += Integer.parseInt( versionString.substring( 2, 3 ) ); } catch ( Throwable e ) { // must be a very old version } registerColumnType( Types.BIGINT, "bigint" ); registerColumnType( Types.BINARY, "binary($l)" ); registerColumnType( Types.BIT, "bit" ); registerColumnType( Types.BOOLEAN, "boolean" ); registerColumnType( Types.CHAR, "char($l)" ); registerColumnType( Types.DATE, "date" ); registerColumnType( Types.DECIMAL, "decimal($p,$s)" ); registerColumnType( Types.DOUBLE, "double" ); registerColumnType( Types.FLOAT, "float" ); registerColumnType( Types.INTEGER, "integer" ); registerColumnType( Types.LONGVARBINARY, "longvarbinary" ); registerColumnType( Types.LONGVARCHAR, "longvarchar" ); registerColumnType( Types.SMALLINT, "smallint" ); registerColumnType( Types.TINYINT, "tinyint" ); registerColumnType( Types.TIME, "time" ); registerColumnType( Types.TIMESTAMP, "timestamp" ); registerColumnType( Types.VARCHAR, "varchar($l)" ); registerColumnType( Types.VARBINARY, "varbinary($l)" ); if ( hsqldbVersion < 20 ) { registerColumnType( Types.NUMERIC, "numeric" ); } else { registerColumnType( Types.NUMERIC, "numeric($p,$s)" ); } //HSQL has no Blob/Clob support .... but just put these here for now! 
if ( hsqldbVersion < 20 ) { registerColumnType( Types.BLOB, "longvarbinary" ); registerColumnType( Types.CLOB, "longvarchar" ); } else { registerColumnType( Types.BLOB, "blob" ); registerColumnType( Types.CLOB, "clob" ); } // aggregate functions registerFunction( "avg", new AvgWithArgumentCastFunction( "double" ) ); // string functions registerFunction( "ascii", new StandardSQLFunction( "ascii", StandardBasicTypes.INTEGER ) ); registerFunction( "char", new StandardSQLFunction( "char", StandardBasicTypes.CHARACTER ) ); registerFunction( "lower", new StandardSQLFunction( "lower" ) ); registerFunction( "upper", new StandardSQLFunction( "upper" ) ); registerFunction( "lcase", new StandardSQLFunction( "lcase" ) ); registerFunction( "ucase", new StandardSQLFunction( "ucase" ) ); registerFunction( "soundex", new StandardSQLFunction( "soundex", StandardBasicTypes.STRING ) ); registerFunction( "ltrim", new StandardSQLFunction( "ltrim" ) ); registerFunction( "rtrim", new StandardSQLFunction( "rtrim" ) ); registerFunction( "reverse", new StandardSQLFunction( "reverse" ) ); registerFunction( "space", new StandardSQLFunction( "space", StandardBasicTypes.STRING ) ); registerFunction( "str", new SQLFunctionTemplate( StandardBasicTypes.STRING, "cast(?1 as varchar(256))" ) ); registerFunction( "to_char", new StandardSQLFunction( "to_char" ) ); registerFunction( "rawtohex", new StandardSQLFunction( "rawtohex" ) ); registerFunction( "hextoraw", new StandardSQLFunction( "hextoraw" ) ); // system functions registerFunction( "user", new NoArgSQLFunction( "user", StandardBasicTypes.STRING ) ); registerFunction( "database", new NoArgSQLFunction( "database", StandardBasicTypes.STRING ) ); // datetime functions if ( hsqldbVersion < 20 ) { registerFunction( "sysdate", new NoArgSQLFunction( "sysdate", StandardBasicTypes.DATE, false ) ); } else { registerFunction( "sysdate", new NoArgSQLFunction( "sysdate", StandardBasicTypes.TIMESTAMP, false ) ); } registerFunction( "current_date", new 
NoArgSQLFunction( "current_date", StandardBasicTypes.DATE, false ) ); registerFunction( "curdate", new NoArgSQLFunction( "curdate", StandardBasicTypes.DATE ) ); registerFunction( "current_timestamp", new NoArgSQLFunction( "current_timestamp", StandardBasicTypes.TIMESTAMP, false ) ); registerFunction( "now", new NoArgSQLFunction( "now", StandardBasicTypes.TIMESTAMP ) ); registerFunction( "current_time", new NoArgSQLFunction( "current_time", StandardBasicTypes.TIME, false ) ); registerFunction( "curtime", new NoArgSQLFunction( "curtime", StandardBasicTypes.TIME ) ); registerFunction( "day", new StandardSQLFunction( "day", StandardBasicTypes.INTEGER ) ); registerFunction( "dayofweek", new StandardSQLFunction( "dayofweek", StandardBasicTypes.INTEGER ) ); registerFunction( "dayofyear", new StandardSQLFunction( "dayofyear", StandardBasicTypes.INTEGER ) ); registerFunction( "dayofmonth", new StandardSQLFunction( "dayofmonth", StandardBasicTypes.INTEGER ) ); registerFunction( "month", new StandardSQLFunction( "month", StandardBasicTypes.INTEGER ) ); registerFunction( "year", new StandardSQLFunction( "year", StandardBasicTypes.INTEGER ) ); registerFunction( "week", new StandardSQLFunction( "week", StandardBasicTypes.INTEGER ) ); registerFunction( "quarter", new StandardSQLFunction( "quarter", StandardBasicTypes.INTEGER ) ); registerFunction( "hour", new StandardSQLFunction( "hour", StandardBasicTypes.INTEGER ) ); registerFunction( "minute", new StandardSQLFunction( "minute", StandardBasicTypes.INTEGER ) ); registerFunction( "second", new SQLFunctionTemplate( StandardBasicTypes.INTEGER, "cast(second(?1) as int)" ) ); registerFunction( "dayname", new StandardSQLFunction( "dayname", StandardBasicTypes.STRING ) ); registerFunction( "monthname", new StandardSQLFunction( "monthname", StandardBasicTypes.STRING ) ); // numeric functions registerFunction( "abs", new StandardSQLFunction( "abs" ) ); registerFunction( "sign", new StandardSQLFunction( "sign", StandardBasicTypes.INTEGER 
) ); registerFunction( "acos", new StandardSQLFunction( "acos", StandardBasicTypes.DOUBLE ) ); registerFunction( "asin", new StandardSQLFunction( "asin", StandardBasicTypes.DOUBLE ) ); registerFunction( "atan", new StandardSQLFunction( "atan", StandardBasicTypes.DOUBLE ) ); registerFunction( "cos", new StandardSQLFunction( "cos", StandardBasicTypes.DOUBLE ) ); registerFunction( "cot", new StandardSQLFunction( "cot", StandardBasicTypes.DOUBLE ) ); registerFunction( "exp", new StandardSQLFunction( "exp", StandardBasicTypes.DOUBLE ) ); registerFunction( "log", new StandardSQLFunction( "log", StandardBasicTypes.DOUBLE ) ); registerFunction( "log10", new StandardSQLFunction( "log10", StandardBasicTypes.DOUBLE ) ); registerFunction( "sin", new StandardSQLFunction( "sin", StandardBasicTypes.DOUBLE ) ); registerFunction( "sqrt", new StandardSQLFunction( "sqrt", StandardBasicTypes.DOUBLE ) ); registerFunction( "tan", new StandardSQLFunction( "tan", StandardBasicTypes.DOUBLE ) ); registerFunction( "pi", new NoArgSQLFunction( "pi", StandardBasicTypes.DOUBLE ) ); registerFunction( "rand", new StandardSQLFunction( "rand", StandardBasicTypes.FLOAT ) ); registerFunction( "radians", new StandardSQLFunction( "radians", StandardBasicTypes.DOUBLE ) ); registerFunction( "degrees", new StandardSQLFunction( "degrees", StandardBasicTypes.DOUBLE ) ); registerFunction( "round", new StandardSQLFunction( "round" ) ); registerFunction( "roundmagic", new StandardSQLFunction( "roundmagic" ) ); registerFunction( "truncate", new StandardSQLFunction( "truncate" ) ); registerFunction( "ceiling", new StandardSQLFunction( "ceiling" ) ); registerFunction( "floor", new StandardSQLFunction( "floor" ) ); // special functions // from v. 
2.2.0 ROWNUM() is supported in all modes as the equivalent of Oracle ROWNUM if ( hsqldbVersion > 21 ) { registerFunction("rownum", new NoArgSQLFunction("rownum", StandardBasicTypes.INTEGER)); } // function templates registerFunction( "concat", new VarArgsSQLFunction( StandardBasicTypes.STRING, "(", "||", ")" ) ); getDefaultProperties().setProperty( Environment.STATEMENT_BATCH_SIZE, DEFAULT_BATCH_SIZE ); }
public HSQLDialect() { super(); try { Class props = ReflectHelper.classForName( "org.hsqldb.persist.HsqlDatabaseProperties" ); String versionString = (String) props.getDeclaredField( "THIS_VERSION" ).get( null ); hsqldbVersion = Integer.parseInt( versionString.substring( 0, 1 ) ) * 10; hsqldbVersion += Integer.parseInt( versionString.substring( 2, 3 ) ); } catch ( Throwable e ) { // must be a very old version } registerColumnType( Types.BIGINT, "bigint" ); registerColumnType( Types.BINARY, "binary($l)" ); registerColumnType( Types.BIT, "bit" ); registerColumnType( Types.BOOLEAN, "boolean" ); registerColumnType( Types.CHAR, "char($l)" ); registerColumnType( Types.DATE, "date" ); registerColumnType( Types.DECIMAL, "decimal($p,$s)" ); registerColumnType( Types.DOUBLE, "double" ); registerColumnType( Types.FLOAT, "float" ); registerColumnType( Types.INTEGER, "integer" ); registerColumnType( Types.LONGVARBINARY, "longvarbinary" ); registerColumnType( Types.LONGVARCHAR, "longvarchar" ); registerColumnType( Types.SMALLINT, "smallint" ); registerColumnType( Types.TINYINT, "tinyint" ); registerColumnType( Types.TIME, "time" ); registerColumnType( Types.TIMESTAMP, "timestamp" ); registerColumnType( Types.VARCHAR, "varchar($l)" ); registerColumnType( Types.VARBINARY, "varbinary($l)" ); if ( hsqldbVersion < 20 ) { registerColumnType( Types.NUMERIC, "numeric" ); } else { registerColumnType( Types.NUMERIC, "numeric($p,$s)" ); } //HSQL has no Blob/Clob support .... but just put these here for now! 
if ( hsqldbVersion < 20 ) { registerColumnType( Types.BLOB, "longvarbinary" ); registerColumnType( Types.CLOB, "longvarchar" ); } else { registerColumnType( Types.BLOB, "blob($l)" ); registerColumnType( Types.CLOB, "clob($l)" ); } // aggregate functions registerFunction( "avg", new AvgWithArgumentCastFunction( "double" ) ); // string functions registerFunction( "ascii", new StandardSQLFunction( "ascii", StandardBasicTypes.INTEGER ) ); registerFunction( "char", new StandardSQLFunction( "char", StandardBasicTypes.CHARACTER ) ); registerFunction( "lower", new StandardSQLFunction( "lower" ) ); registerFunction( "upper", new StandardSQLFunction( "upper" ) ); registerFunction( "lcase", new StandardSQLFunction( "lcase" ) ); registerFunction( "ucase", new StandardSQLFunction( "ucase" ) ); registerFunction( "soundex", new StandardSQLFunction( "soundex", StandardBasicTypes.STRING ) ); registerFunction( "ltrim", new StandardSQLFunction( "ltrim" ) ); registerFunction( "rtrim", new StandardSQLFunction( "rtrim" ) ); registerFunction( "reverse", new StandardSQLFunction( "reverse" ) ); registerFunction( "space", new StandardSQLFunction( "space", StandardBasicTypes.STRING ) ); registerFunction( "str", new SQLFunctionTemplate( StandardBasicTypes.STRING, "cast(?1 as varchar(256))" ) ); registerFunction( "to_char", new StandardSQLFunction( "to_char" ) ); registerFunction( "rawtohex", new StandardSQLFunction( "rawtohex" ) ); registerFunction( "hextoraw", new StandardSQLFunction( "hextoraw" ) ); // system functions registerFunction( "user", new NoArgSQLFunction( "user", StandardBasicTypes.STRING ) ); registerFunction( "database", new NoArgSQLFunction( "database", StandardBasicTypes.STRING ) ); // datetime functions if ( hsqldbVersion < 20 ) { registerFunction( "sysdate", new NoArgSQLFunction( "sysdate", StandardBasicTypes.DATE, false ) ); } else { registerFunction( "sysdate", new NoArgSQLFunction( "sysdate", StandardBasicTypes.TIMESTAMP, false ) ); } registerFunction( "current_date", 
new NoArgSQLFunction( "current_date", StandardBasicTypes.DATE, false ) ); registerFunction( "curdate", new NoArgSQLFunction( "curdate", StandardBasicTypes.DATE ) ); registerFunction( "current_timestamp", new NoArgSQLFunction( "current_timestamp", StandardBasicTypes.TIMESTAMP, false ) ); registerFunction( "now", new NoArgSQLFunction( "now", StandardBasicTypes.TIMESTAMP ) ); registerFunction( "current_time", new NoArgSQLFunction( "current_time", StandardBasicTypes.TIME, false ) ); registerFunction( "curtime", new NoArgSQLFunction( "curtime", StandardBasicTypes.TIME ) ); registerFunction( "day", new StandardSQLFunction( "day", StandardBasicTypes.INTEGER ) ); registerFunction( "dayofweek", new StandardSQLFunction( "dayofweek", StandardBasicTypes.INTEGER ) ); registerFunction( "dayofyear", new StandardSQLFunction( "dayofyear", StandardBasicTypes.INTEGER ) ); registerFunction( "dayofmonth", new StandardSQLFunction( "dayofmonth", StandardBasicTypes.INTEGER ) ); registerFunction( "month", new StandardSQLFunction( "month", StandardBasicTypes.INTEGER ) ); registerFunction( "year", new StandardSQLFunction( "year", StandardBasicTypes.INTEGER ) ); registerFunction( "week", new StandardSQLFunction( "week", StandardBasicTypes.INTEGER ) ); registerFunction( "quarter", new StandardSQLFunction( "quarter", StandardBasicTypes.INTEGER ) ); registerFunction( "hour", new StandardSQLFunction( "hour", StandardBasicTypes.INTEGER ) ); registerFunction( "minute", new StandardSQLFunction( "minute", StandardBasicTypes.INTEGER ) ); registerFunction( "second", new SQLFunctionTemplate( StandardBasicTypes.INTEGER, "cast(second(?1) as int)" ) ); registerFunction( "dayname", new StandardSQLFunction( "dayname", StandardBasicTypes.STRING ) ); registerFunction( "monthname", new StandardSQLFunction( "monthname", StandardBasicTypes.STRING ) ); // numeric functions registerFunction( "abs", new StandardSQLFunction( "abs" ) ); registerFunction( "sign", new StandardSQLFunction( "sign", 
StandardBasicTypes.INTEGER ) ); registerFunction( "acos", new StandardSQLFunction( "acos", StandardBasicTypes.DOUBLE ) ); registerFunction( "asin", new StandardSQLFunction( "asin", StandardBasicTypes.DOUBLE ) ); registerFunction( "atan", new StandardSQLFunction( "atan", StandardBasicTypes.DOUBLE ) ); registerFunction( "cos", new StandardSQLFunction( "cos", StandardBasicTypes.DOUBLE ) ); registerFunction( "cot", new StandardSQLFunction( "cot", StandardBasicTypes.DOUBLE ) ); registerFunction( "exp", new StandardSQLFunction( "exp", StandardBasicTypes.DOUBLE ) ); registerFunction( "log", new StandardSQLFunction( "log", StandardBasicTypes.DOUBLE ) ); registerFunction( "log10", new StandardSQLFunction( "log10", StandardBasicTypes.DOUBLE ) ); registerFunction( "sin", new StandardSQLFunction( "sin", StandardBasicTypes.DOUBLE ) ); registerFunction( "sqrt", new StandardSQLFunction( "sqrt", StandardBasicTypes.DOUBLE ) ); registerFunction( "tan", new StandardSQLFunction( "tan", StandardBasicTypes.DOUBLE ) ); registerFunction( "pi", new NoArgSQLFunction( "pi", StandardBasicTypes.DOUBLE ) ); registerFunction( "rand", new StandardSQLFunction( "rand", StandardBasicTypes.FLOAT ) ); registerFunction( "radians", new StandardSQLFunction( "radians", StandardBasicTypes.DOUBLE ) ); registerFunction( "degrees", new StandardSQLFunction( "degrees", StandardBasicTypes.DOUBLE ) ); registerFunction( "round", new StandardSQLFunction( "round" ) ); registerFunction( "roundmagic", new StandardSQLFunction( "roundmagic" ) ); registerFunction( "truncate", new StandardSQLFunction( "truncate" ) ); registerFunction( "ceiling", new StandardSQLFunction( "ceiling" ) ); registerFunction( "floor", new StandardSQLFunction( "floor" ) ); // special functions // from v. 
2.2.0 ROWNUM() is supported in all modes as the equivalent of Oracle ROWNUM if ( hsqldbVersion > 21 ) { registerFunction("rownum", new NoArgSQLFunction("rownum", StandardBasicTypes.INTEGER)); } // function templates registerFunction( "concat", new VarArgsSQLFunction( StandardBasicTypes.STRING, "(", "||", ")" ) ); getDefaultProperties().setProperty( Environment.STATEMENT_BATCH_SIZE, DEFAULT_BATCH_SIZE ); }
diff --git a/src/com/wickedspiral/jacss/parser/Parser.java b/src/com/wickedspiral/jacss/parser/Parser.java index 00fc06b..d5f3ff4 100644 --- a/src/com/wickedspiral/jacss/parser/Parser.java +++ b/src/com/wickedspiral/jacss/parser/Parser.java @@ -1,695 +1,699 @@ /* * Portions of this code (specifically, regex-based hex constant compression) are * Copyright (c) 2007, Yahoo! Inc. All rights reserved. * Code licensed under the BSD License: * http://developer.yahoo.net/yui/license.txt */ package com.wickedspiral.jacss.parser; import com.google.common.base.Joiner; import com.wickedspiral.jacss.Options; import com.wickedspiral.jacss.lexer.Token; import com.wickedspiral.jacss.lexer.TokenListener; import java.io.PrintStream; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.LinkedList; import java.util.regex.Matcher; import java.util.regex.Pattern; import static com.wickedspiral.jacss.lexer.Token.*; /** * @author wasche * @since 2011.08.04 */ public class Parser implements TokenListener { private static final Joiner NULL_JOINER = Joiner.on( "" ); private static final String MS_ALPHA = "progid:dximagetransform.microsoft.alpha(opacity="; private static final String MS_SHADOW = "progid:dximagetransform.microsoft.shadow"; private static final Collection<String> UNITS = new HashSet<>( Arrays.asList( "px", "em", "pt", "in", "cm", "mm", "pc", "ex", "deg", "s", "%" ) ); private final Collection<String> KEYWORDS = new HashSet<>( Arrays.asList( "normal", "bold", "italic", "serif", "sans-serif", "fixed" ) ); private static final Collection<String> BOUNDARY_OPS = new HashSet<>( Arrays.asList( "{", "}", "(", ")", ">", ";", ":", "," ) ); // or comment private static final Collection<String> DUAL_ZERO_PROPERTIES = new HashSet<>( Arrays.asList( "background-position", "-webkit-transform-origin", "-moz-transform-origin" ) ); private static final Collection<String> NONE_PROPERTIES = new HashSet<>(); // YUI only strips units or leading zero if the 
zero was preceded by colon or whitespace. // However it also strips whitespace following certain chars before doing that, causing // the zero logic to break everywhere. This list lets us be compatible with that bug. :-( private static final Collection<Token> YUI_NO_SPACE_AFTER = new HashSet<>( Arrays.asList(COMMA, LBRACE, RBRACE, COLON, SEMICOLON, LPAREN) ); static { NONE_PROPERTIES.add( "outline" ); for ( String property : new String[]{ "border", "margin", "padding" } ) { NONE_PROPERTIES.add( property ); for ( String edge : new String[]{ "top", "left", "bottom", "right" } ) { NONE_PROPERTIES.add( property + "-" + edge ); } } } // buffers private LinkedList<String> ruleBuffer; private LinkedList<String> valueBuffer; private LinkedList<String> rgbBuffer; private String pending; // flags private boolean inRule; private boolean space; private boolean charset; private boolean at; private boolean ie5mac; private boolean rgb; private int checkSpace; // other state private String property; private Token lastToken; private Token lastLastToken; private String lastValue; private boolean base64; private final PrintStream out; private final Options options; public Parser( PrintStream outputStream, Options options ) { out = outputStream; ruleBuffer = new LinkedList<>(); valueBuffer = new LinkedList<>(); rgbBuffer = new LinkedList<>(); inRule = false; space = false; charset = false; at = false; ie5mac = false; rgb = false; checkSpace = -1; this.options = options; if (! options.shouldLowercasifyKeywords()) { KEYWORDS.remove("sans-serif"); // Fix #25 } } // ++ Output functions private void output( Collection<String> strings ) { for ( String s : strings ) { output( s ); } } private void output( String str ) { out.print( str ); } private void dump( String str ) { ruleBuffer.add( pending ); ruleBuffer.add( str ); output( ruleBuffer ); ruleBuffer.clear(); pending = null; } private void write( String str ) { if ( str == null || str.length() == 0 ) return; if ( str.startsWith( "/*!" 
) && ruleBuffer.isEmpty() ) { output( str ); return; } ruleBuffer.add( str ); if ( "}".equals( str ) || ";}".equals( str ) ) { // check for empty rule if ( ruleBuffer.size() < 2 || (ruleBuffer.size() >= 2 && !"{".equals( ruleBuffer.get( ruleBuffer.size() - 2 ) )) ) { output( ruleBuffer ); } ruleBuffer.clear(); } } private void buffer( String str ) { if ( str == null || str.length() == 0 ) return; if ( pending == null ) { pending = str; } else { write( pending ); pending = str; } } private void queue( String str ) { if ( str == null || str.length() == 0 ) return; if ( property != null ) { valueBuffer.add( str ); } else { buffer( str ); } } private void collapseValue() { String value = NULL_JOINER.join( valueBuffer ); valueBuffer.clear(); if ( "0 0".equals( value ) || "0 0 0 0".equals( value ) || "0 0 0".equals( value ) ) { if ( DUAL_ZERO_PROPERTIES.contains( property ) ) { buffer( "0 0" ); } else { buffer("0"); } } else if ("none".equals(value) && (NONE_PROPERTIES.contains(property) || "background".equals(property)) && options.shouldCollapseNone()) { buffer("0"); } else { buffer(value); } } private void space(boolean emit, String reason) { if (emit) { queue(" "); if (options.isDebug()) System.err.println("Emit space because: " + reason); } else { if (options.isDebug()) System.err.println("Hide space because: " + reason); } } // ++ TokenListener public void token(Token token, String value) { if (options.isDebug()) System.err.printf("Token: %s, value: %s, space? %b, in rule? 
%b\n", token, value, space, inRule); if (rgb) { if (NUMBER == token) { String h = Integer.toHexString(Integer.parseInt(value)).toLowerCase(); if (h.length() < 2) { h = "0" + h; } rgbBuffer.add(h); } else if (LPAREN == token) { if (NUMBER == lastToken) { space(true, "RGB value separator"); } queue("#"); rgbBuffer.clear(); } else if (RPAREN == token) { if (rgbBuffer.size() == 3) { String a = rgbBuffer.get(0); String b = rgbBuffer.get(1); String c = rgbBuffer.get(2); if (a.charAt(0) == a.charAt(1) && b.charAt(0) == b.charAt(1) && c.charAt(0) == c.charAt(1)) { queue(a.substring(0, 1)); queue(b.substring(0, 1)); queue(c.substring(0, 1)); rgb = false; return; } } for (String s : rgbBuffer) { queue(s); } rgb = false; } return; } if (token == WHITESPACE) { space = true; return; // most of these are unneeded } if (token == COMMENT) { // comments are only needed in a few places: // 1) special comments /*! ... */ if ('!' == value.charAt(2)) { queue(value); lastLastToken = lastToken; lastToken = token; lastValue = value; } // 2) IE5/Mac hack else if ('\\' == value.charAt(value.length()-3)) { queue("/*\\*/"); lastLastToken = lastToken; lastToken = token; lastValue = value; ie5mac = true; } else if (ie5mac) { // if the rule buffer starts with the opening ie5/mac comment hack, // and the rest of the buffer + pending would collapse to nothing, // suppress the hack comments if ( ruleBuffer.size() >= 2 && "}".equals( pending ) && !"{".equals( ruleBuffer.get( ruleBuffer.size() - 1 ) ) ) { queue("/**/"); } lastLastToken = lastToken; lastToken = token; lastValue = value; ie5mac = false; } // 3) After a child selector else if (GT == lastToken) { queue("/**/"); lastLastToken = lastToken; lastToken = token; lastValue = value; } return; } // make sure we have space between values for multi-value properties // margin: 5px 5px if ( inRule && ( ( NUMBER == lastToken && (HASH == token || NUMBER == token) ) || ( (IDENTIFIER == lastToken || PERCENT == lastToken || RPAREN == lastToken) && (NUMBER 
== token || IDENTIFIER == token || HASH == token) ) )) { space(true, "multi-value property separator"); space = false; } // rgb() if (IDENTIFIER == token && "rgb".equals(value)) { rgb = true; space = false; return; } // base64,data... if (IDENTIFIER == token && "base64".equals(value)) { queue(value); base64 = true; space = false; return; } if (AT == token) { queue(value); at = true; } else if (inRule && COLON == token && property == null) { queue(value); property = lastValue.toLowerCase(); valueBuffer.clear(); } // first-letter and first-line must be followed by a space else if (!inRule && COLON == lastToken && ("first-letter".equals(value) || "first-line".equals(value))) { queue(value); space(true, "first-letter or first-line"); } else if (SEMICOLON == token) { if (at) { // means at-directive with no rule body // (because seeing LBRACE sets at=false) at = false; if ("charset".equals(ruleBuffer.get(1))) { // e.g. // @charset "utf-8"; // can only appear once if (charset) { ruleBuffer.clear(); pending = null; } else { charset = true; dump(value); } } else { dump(value); } } else if (SEMICOLON == lastToken) { return; // skip duplicate semicolons } else { collapseValue(); valueBuffer.clear(); property = null; base64 = false; queue(value); } } else if (LBRACE == token) { if (checkSpace != -1) { // start of a rule, the space was correct checkSpace = -1; } if (at) { at = false; dump(value); } else { inRule = true; queue(value); } } else if (RBRACE == token) { if (checkSpace != -1) { // didn't start a rule, space was wrong ruleBuffer.remove(checkSpace); checkSpace = -1; } if (!valueBuffer.isEmpty()) { collapseValue(); } if (";".equals(pending)) { if (options.keepTailingSemicolons()) { buffer(";"); } pending = value; } else if (options.addTrailingSemicolons()) // Fix #19 { buffer(";" + value); } else { buffer(value); } property = null; base64 = false; inRule = false; } else if (!inRule) { if (!space || GT == token || lastToken == null || BOUNDARY_OPS.contains( lastValue )) 
{ queue(value); } else { if (COLON == token) { checkSpace = ruleBuffer.size() + 1; // include pending value } if (COMMENT != lastToken && !BOUNDARY_OPS.contains( lastValue ) && (!BOUNDARY_OPS.contains(value) || COLON == token)) { space(true, "needs comment"); } queue(value); space = false; } } // at this point we're in a rule body and looking at something that continues // the current property value (the property name is set several steps above, // when we see the COLON after it) else if (NUMBER == token && value.startsWith("0.")) { boolean yuiCanCollapse = COLON == lastToken || !YUI_NO_SPACE_AFTER.contains(lastToken); if ( options.shouldCollapseZeroes() || yuiCanCollapse ) { queue(value.substring(1)); } else { queue( value ); } } else if (STRING == token && "-ms-filter".equals(property)) { String v = value.toLowerCase(); if (options.shouldCompressMicrosoft() && v.startsWith(MS_ALPHA, 1)) { String c = value.substring(0, 1); String o = value.substring(MS_ALPHA.length()+1, value.length()-2); queue(c); queue("alpha(opacity="); queue(o); queue(")"); queue(c); } else if (options.shouldCompressMicrosoft() && v.startsWith(MS_SHADOW, 1)) { queue(value.replaceAll(", +", ",")); } else { queue(value); } } else if (STRING == token && options.shouldCleanXmlStrings() && value.contains("svg+xml")) { queue(cleanXml(value)); } else if (EQUALS == token) { queue(value); StringBuilder sb = new StringBuilder(); for (String s : valueBuffer) { sb.append(s); } if (options.shouldCompressMicrosoft() && MS_ALPHA.equals(sb.toString().toLowerCase())) { buffer("alpha(opacity="); valueBuffer.clear(); } } else { String v = value.toLowerCase(); // values of 0 don't need a unit if (NUMBER == lastToken && "0".equals(lastValue) && (PERCENT == token || IDENTIFIER == token)) { boolean stripIt = COLON == lastLastToken || !YUI_NO_SPACE_AFTER.contains(lastLastToken); if (options.keepUnitsWithZero() && !stripIt) { queue(value); } else if (options.keepUnitsWithZero() && ("deg".equals(value) || 
"s".equals(value))) { queue(value); } else if (!UNITS.contains(value)) { space(true, "0 unknown units"); queue(value); } } // use 0 instead of none else if (COLON == lastToken && "none".equals(value) && NONE_PROPERTIES.contains(property) && options.shouldCollapseNone()) { queue("0"); } // force properties to lower case for better gzip compression else if (COLON != lastToken && IDENTIFIER == token) { // #aabbcc if (HASH == lastToken) { boolean eq = value.length() == 6 && v.charAt(0) == v.charAt(1) && v.charAt(2) == v.charAt(3) && v.charAt(4) == v.charAt(5); if (!options.shouldLowercasifyRgb()) { v = value; } if (eq) { queue(v.substring(0, 1)); queue(v.substring(2, 3)); queue(v.substring(4, 5)); } else { queue(v); } } else { if ( space && !BOUNDARY_OPS.contains( lastValue ) && BANG != token ) { space(true, "need comment"); } if ((property == null && !base64)|| KEYWORDS.contains(v)) { queue(v); } else { queue(value); } } } // nothing special, just send it along else { if ( space && BANG != token && !BOUNDARY_OPS.contains(value) && !BOUNDARY_OPS.contains(lastValue)) { space(true, "between token and non-boundary op"); } + else if (space && QUESTION == token) + { + space(true, "before question mark"); + } if (KEYWORDS.contains(v)) { queue(v); } else { queue(value); } } } lastLastToken = lastToken; lastToken = token; lastValue = value; space = false; } public void end() { write(pending); if (!ruleBuffer.isEmpty()) { output(ruleBuffer); } } // Fix #32 -- YUI indiscriminately compresses some SVG content. 
private final static Pattern multipleSpaces = Pattern.compile("\\s\\s\\s*"); private final static Pattern trailingSpace = Pattern.compile("([>,])\\s+"); private final static Pattern leadingZero = Pattern.compile("([ :])0\\."); private final static Pattern hexString = Pattern.compile("([^\"'=\\s])(\\s*)#([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])"); private String cleanXml(String s) { s = multipleSpaces.matcher(s).replaceAll(" "); s = trailingSpace.matcher(s).replaceAll("$1"); s = leadingZero.matcher(s).replaceAll("$1."); Matcher m = hexString.matcher(s); StringBuffer sb = new StringBuffer(); while (m.find()) { // Test for AABBCC pattern if (m.group(3).equalsIgnoreCase(m.group(4)) && m.group(5).equalsIgnoreCase(m.group(6)) && m.group(7).equalsIgnoreCase(m.group(8))) { m.appendReplacement(sb, m.group(1) + m.group(2) + "#" + m.group(3) + m.group(5) + m.group(7)); } else { m.appendReplacement(sb, m.group()); } } m.appendTail(sb); s = sb.toString(); return s; } }
true
true
public void token(Token token, String value) { if (options.isDebug()) System.err.printf("Token: %s, value: %s, space? %b, in rule? %b\n", token, value, space, inRule); if (rgb) { if (NUMBER == token) { String h = Integer.toHexString(Integer.parseInt(value)).toLowerCase(); if (h.length() < 2) { h = "0" + h; } rgbBuffer.add(h); } else if (LPAREN == token) { if (NUMBER == lastToken) { space(true, "RGB value separator"); } queue("#"); rgbBuffer.clear(); } else if (RPAREN == token) { if (rgbBuffer.size() == 3) { String a = rgbBuffer.get(0); String b = rgbBuffer.get(1); String c = rgbBuffer.get(2); if (a.charAt(0) == a.charAt(1) && b.charAt(0) == b.charAt(1) && c.charAt(0) == c.charAt(1)) { queue(a.substring(0, 1)); queue(b.substring(0, 1)); queue(c.substring(0, 1)); rgb = false; return; } } for (String s : rgbBuffer) { queue(s); } rgb = false; } return; } if (token == WHITESPACE) { space = true; return; // most of these are unneeded } if (token == COMMENT) { // comments are only needed in a few places: // 1) special comments /*! ... */ if ('!' 
== value.charAt(2)) { queue(value); lastLastToken = lastToken; lastToken = token; lastValue = value; } // 2) IE5/Mac hack else if ('\\' == value.charAt(value.length()-3)) { queue("/*\\*/"); lastLastToken = lastToken; lastToken = token; lastValue = value; ie5mac = true; } else if (ie5mac) { // if the rule buffer starts with the opening ie5/mac comment hack, // and the rest of the buffer + pending would collapse to nothing, // suppress the hack comments if ( ruleBuffer.size() >= 2 && "}".equals( pending ) && !"{".equals( ruleBuffer.get( ruleBuffer.size() - 1 ) ) ) { queue("/**/"); } lastLastToken = lastToken; lastToken = token; lastValue = value; ie5mac = false; } // 3) After a child selector else if (GT == lastToken) { queue("/**/"); lastLastToken = lastToken; lastToken = token; lastValue = value; } return; } // make sure we have space between values for multi-value properties // margin: 5px 5px if ( inRule && ( ( NUMBER == lastToken && (HASH == token || NUMBER == token) ) || ( (IDENTIFIER == lastToken || PERCENT == lastToken || RPAREN == lastToken) && (NUMBER == token || IDENTIFIER == token || HASH == token) ) )) { space(true, "multi-value property separator"); space = false; } // rgb() if (IDENTIFIER == token && "rgb".equals(value)) { rgb = true; space = false; return; } // base64,data... 
if (IDENTIFIER == token && "base64".equals(value)) { queue(value); base64 = true; space = false; return; } if (AT == token) { queue(value); at = true; } else if (inRule && COLON == token && property == null) { queue(value); property = lastValue.toLowerCase(); valueBuffer.clear(); } // first-letter and first-line must be followed by a space else if (!inRule && COLON == lastToken && ("first-letter".equals(value) || "first-line".equals(value))) { queue(value); space(true, "first-letter or first-line"); } else if (SEMICOLON == token) { if (at) { // means at-directive with no rule body // (because seeing LBRACE sets at=false) at = false; if ("charset".equals(ruleBuffer.get(1))) { // e.g. // @charset "utf-8"; // can only appear once if (charset) { ruleBuffer.clear(); pending = null; } else { charset = true; dump(value); } } else { dump(value); } } else if (SEMICOLON == lastToken) { return; // skip duplicate semicolons } else { collapseValue(); valueBuffer.clear(); property = null; base64 = false; queue(value); } } else if (LBRACE == token) { if (checkSpace != -1) { // start of a rule, the space was correct checkSpace = -1; } if (at) { at = false; dump(value); } else { inRule = true; queue(value); } } else if (RBRACE == token) { if (checkSpace != -1) { // didn't start a rule, space was wrong ruleBuffer.remove(checkSpace); checkSpace = -1; } if (!valueBuffer.isEmpty()) { collapseValue(); } if (";".equals(pending)) { if (options.keepTailingSemicolons()) { buffer(";"); } pending = value; } else if (options.addTrailingSemicolons()) // Fix #19 { buffer(";" + value); } else { buffer(value); } property = null; base64 = false; inRule = false; } else if (!inRule) { if (!space || GT == token || lastToken == null || BOUNDARY_OPS.contains( lastValue )) { queue(value); } else { if (COLON == token) { checkSpace = ruleBuffer.size() + 1; // include pending value } if (COMMENT != lastToken && !BOUNDARY_OPS.contains( lastValue ) && (!BOUNDARY_OPS.contains(value) || COLON == token)) { 
space(true, "needs comment"); } queue(value); space = false; } } // at this point we're in a rule body and looking at something that continues // the current property value (the property name is set several steps above, // when we see the COLON after it) else if (NUMBER == token && value.startsWith("0.")) { boolean yuiCanCollapse = COLON == lastToken || !YUI_NO_SPACE_AFTER.contains(lastToken); if ( options.shouldCollapseZeroes() || yuiCanCollapse ) { queue(value.substring(1)); } else { queue( value ); } } else if (STRING == token && "-ms-filter".equals(property)) { String v = value.toLowerCase(); if (options.shouldCompressMicrosoft() && v.startsWith(MS_ALPHA, 1)) { String c = value.substring(0, 1); String o = value.substring(MS_ALPHA.length()+1, value.length()-2); queue(c); queue("alpha(opacity="); queue(o); queue(")"); queue(c); } else if (options.shouldCompressMicrosoft() && v.startsWith(MS_SHADOW, 1)) { queue(value.replaceAll(", +", ",")); } else { queue(value); } } else if (STRING == token && options.shouldCleanXmlStrings() && value.contains("svg+xml")) { queue(cleanXml(value)); } else if (EQUALS == token) { queue(value); StringBuilder sb = new StringBuilder(); for (String s : valueBuffer) { sb.append(s); } if (options.shouldCompressMicrosoft() && MS_ALPHA.equals(sb.toString().toLowerCase())) { buffer("alpha(opacity="); valueBuffer.clear(); } } else { String v = value.toLowerCase(); // values of 0 don't need a unit if (NUMBER == lastToken && "0".equals(lastValue) && (PERCENT == token || IDENTIFIER == token)) { boolean stripIt = COLON == lastLastToken || !YUI_NO_SPACE_AFTER.contains(lastLastToken); if (options.keepUnitsWithZero() && !stripIt) { queue(value); } else if (options.keepUnitsWithZero() && ("deg".equals(value) || "s".equals(value))) { queue(value); } else if (!UNITS.contains(value)) { space(true, "0 unknown units"); queue(value); } } // use 0 instead of none else if (COLON == lastToken && "none".equals(value) && NONE_PROPERTIES.contains(property) && 
options.shouldCollapseNone()) { queue("0"); } // force properties to lower case for better gzip compression else if (COLON != lastToken && IDENTIFIER == token) { // #aabbcc if (HASH == lastToken) { boolean eq = value.length() == 6 && v.charAt(0) == v.charAt(1) && v.charAt(2) == v.charAt(3) && v.charAt(4) == v.charAt(5); if (!options.shouldLowercasifyRgb()) { v = value; } if (eq) { queue(v.substring(0, 1)); queue(v.substring(2, 3)); queue(v.substring(4, 5)); } else { queue(v); } } else { if ( space && !BOUNDARY_OPS.contains( lastValue ) && BANG != token ) { space(true, "need comment"); } if ((property == null && !base64)|| KEYWORDS.contains(v)) { queue(v); } else { queue(value); } } } // nothing special, just send it along else { if ( space && BANG != token && !BOUNDARY_OPS.contains(value) && !BOUNDARY_OPS.contains(lastValue)) { space(true, "between token and non-boundary op"); } if (KEYWORDS.contains(v)) { queue(v); } else { queue(value); } } } lastLastToken = lastToken; lastToken = token; lastValue = value; space = false; }
public void token(Token token, String value) { if (options.isDebug()) System.err.printf("Token: %s, value: %s, space? %b, in rule? %b\n", token, value, space, inRule); if (rgb) { if (NUMBER == token) { String h = Integer.toHexString(Integer.parseInt(value)).toLowerCase(); if (h.length() < 2) { h = "0" + h; } rgbBuffer.add(h); } else if (LPAREN == token) { if (NUMBER == lastToken) { space(true, "RGB value separator"); } queue("#"); rgbBuffer.clear(); } else if (RPAREN == token) { if (rgbBuffer.size() == 3) { String a = rgbBuffer.get(0); String b = rgbBuffer.get(1); String c = rgbBuffer.get(2); if (a.charAt(0) == a.charAt(1) && b.charAt(0) == b.charAt(1) && c.charAt(0) == c.charAt(1)) { queue(a.substring(0, 1)); queue(b.substring(0, 1)); queue(c.substring(0, 1)); rgb = false; return; } } for (String s : rgbBuffer) { queue(s); } rgb = false; } return; } if (token == WHITESPACE) { space = true; return; // most of these are unneeded } if (token == COMMENT) { // comments are only needed in a few places: // 1) special comments /*! ... */ if ('!' 
== value.charAt(2)) { queue(value); lastLastToken = lastToken; lastToken = token; lastValue = value; } // 2) IE5/Mac hack else if ('\\' == value.charAt(value.length()-3)) { queue("/*\\*/"); lastLastToken = lastToken; lastToken = token; lastValue = value; ie5mac = true; } else if (ie5mac) { // if the rule buffer starts with the opening ie5/mac comment hack, // and the rest of the buffer + pending would collapse to nothing, // suppress the hack comments if ( ruleBuffer.size() >= 2 && "}".equals( pending ) && !"{".equals( ruleBuffer.get( ruleBuffer.size() - 1 ) ) ) { queue("/**/"); } lastLastToken = lastToken; lastToken = token; lastValue = value; ie5mac = false; } // 3) After a child selector else if (GT == lastToken) { queue("/**/"); lastLastToken = lastToken; lastToken = token; lastValue = value; } return; } // make sure we have space between values for multi-value properties // margin: 5px 5px if ( inRule && ( ( NUMBER == lastToken && (HASH == token || NUMBER == token) ) || ( (IDENTIFIER == lastToken || PERCENT == lastToken || RPAREN == lastToken) && (NUMBER == token || IDENTIFIER == token || HASH == token) ) )) { space(true, "multi-value property separator"); space = false; } // rgb() if (IDENTIFIER == token && "rgb".equals(value)) { rgb = true; space = false; return; } // base64,data... 
if (IDENTIFIER == token && "base64".equals(value)) { queue(value); base64 = true; space = false; return; } if (AT == token) { queue(value); at = true; } else if (inRule && COLON == token && property == null) { queue(value); property = lastValue.toLowerCase(); valueBuffer.clear(); } // first-letter and first-line must be followed by a space else if (!inRule && COLON == lastToken && ("first-letter".equals(value) || "first-line".equals(value))) { queue(value); space(true, "first-letter or first-line"); } else if (SEMICOLON == token) { if (at) { // means at-directive with no rule body // (because seeing LBRACE sets at=false) at = false; if ("charset".equals(ruleBuffer.get(1))) { // e.g. // @charset "utf-8"; // can only appear once if (charset) { ruleBuffer.clear(); pending = null; } else { charset = true; dump(value); } } else { dump(value); } } else if (SEMICOLON == lastToken) { return; // skip duplicate semicolons } else { collapseValue(); valueBuffer.clear(); property = null; base64 = false; queue(value); } } else if (LBRACE == token) { if (checkSpace != -1) { // start of a rule, the space was correct checkSpace = -1; } if (at) { at = false; dump(value); } else { inRule = true; queue(value); } } else if (RBRACE == token) { if (checkSpace != -1) { // didn't start a rule, space was wrong ruleBuffer.remove(checkSpace); checkSpace = -1; } if (!valueBuffer.isEmpty()) { collapseValue(); } if (";".equals(pending)) { if (options.keepTailingSemicolons()) { buffer(";"); } pending = value; } else if (options.addTrailingSemicolons()) // Fix #19 { buffer(";" + value); } else { buffer(value); } property = null; base64 = false; inRule = false; } else if (!inRule) { if (!space || GT == token || lastToken == null || BOUNDARY_OPS.contains( lastValue )) { queue(value); } else { if (COLON == token) { checkSpace = ruleBuffer.size() + 1; // include pending value } if (COMMENT != lastToken && !BOUNDARY_OPS.contains( lastValue ) && (!BOUNDARY_OPS.contains(value) || COLON == token)) { 
space(true, "needs comment"); } queue(value); space = false; } } // at this point we're in a rule body and looking at something that continues // the current property value (the property name is set several steps above, // when we see the COLON after it) else if (NUMBER == token && value.startsWith("0.")) { boolean yuiCanCollapse = COLON == lastToken || !YUI_NO_SPACE_AFTER.contains(lastToken); if ( options.shouldCollapseZeroes() || yuiCanCollapse ) { queue(value.substring(1)); } else { queue( value ); } } else if (STRING == token && "-ms-filter".equals(property)) { String v = value.toLowerCase(); if (options.shouldCompressMicrosoft() && v.startsWith(MS_ALPHA, 1)) { String c = value.substring(0, 1); String o = value.substring(MS_ALPHA.length()+1, value.length()-2); queue(c); queue("alpha(opacity="); queue(o); queue(")"); queue(c); } else if (options.shouldCompressMicrosoft() && v.startsWith(MS_SHADOW, 1)) { queue(value.replaceAll(", +", ",")); } else { queue(value); } } else if (STRING == token && options.shouldCleanXmlStrings() && value.contains("svg+xml")) { queue(cleanXml(value)); } else if (EQUALS == token) { queue(value); StringBuilder sb = new StringBuilder(); for (String s : valueBuffer) { sb.append(s); } if (options.shouldCompressMicrosoft() && MS_ALPHA.equals(sb.toString().toLowerCase())) { buffer("alpha(opacity="); valueBuffer.clear(); } } else { String v = value.toLowerCase(); // values of 0 don't need a unit if (NUMBER == lastToken && "0".equals(lastValue) && (PERCENT == token || IDENTIFIER == token)) { boolean stripIt = COLON == lastLastToken || !YUI_NO_SPACE_AFTER.contains(lastLastToken); if (options.keepUnitsWithZero() && !stripIt) { queue(value); } else if (options.keepUnitsWithZero() && ("deg".equals(value) || "s".equals(value))) { queue(value); } else if (!UNITS.contains(value)) { space(true, "0 unknown units"); queue(value); } } // use 0 instead of none else if (COLON == lastToken && "none".equals(value) && NONE_PROPERTIES.contains(property) && 
options.shouldCollapseNone()) { queue("0"); } // force properties to lower case for better gzip compression else if (COLON != lastToken && IDENTIFIER == token) { // #aabbcc if (HASH == lastToken) { boolean eq = value.length() == 6 && v.charAt(0) == v.charAt(1) && v.charAt(2) == v.charAt(3) && v.charAt(4) == v.charAt(5); if (!options.shouldLowercasifyRgb()) { v = value; } if (eq) { queue(v.substring(0, 1)); queue(v.substring(2, 3)); queue(v.substring(4, 5)); } else { queue(v); } } else { if ( space && !BOUNDARY_OPS.contains( lastValue ) && BANG != token ) { space(true, "need comment"); } if ((property == null && !base64)|| KEYWORDS.contains(v)) { queue(v); } else { queue(value); } } } // nothing special, just send it along else { if ( space && BANG != token && !BOUNDARY_OPS.contains(value) && !BOUNDARY_OPS.contains(lastValue)) { space(true, "between token and non-boundary op"); } else if (space && QUESTION == token) { space(true, "before question mark"); } if (KEYWORDS.contains(v)) { queue(v); } else { queue(value); } } } lastLastToken = lastToken; lastToken = token; lastValue = value; space = false; }
diff --git a/CustomSpawners/src/com/github/thebiologist13/commands/entities/EntityVelocityCommand.java b/CustomSpawners/src/com/github/thebiologist13/commands/entities/EntityVelocityCommand.java index cf4f2dc..c806cb3 100644 --- a/CustomSpawners/src/com/github/thebiologist13/commands/entities/EntityVelocityCommand.java +++ b/CustomSpawners/src/com/github/thebiologist13/commands/entities/EntityVelocityCommand.java @@ -1,132 +1,132 @@ package com.github.thebiologist13.commands.entities; import java.util.logging.Logger; import org.bukkit.ChatColor; import org.bukkit.command.Command; import org.bukkit.command.CommandSender; import org.bukkit.entity.Player; import org.bukkit.util.Vector; import com.github.thebiologist13.CustomSpawners; import com.github.thebiologist13.SpawnableEntity; import com.github.thebiologist13.commands.SpawnerCommand; public class EntityVelocityCommand extends SpawnerCommand { private CustomSpawners plugin = null; private Logger log = null; public EntityVelocityCommand(CustomSpawners plugin) { this.plugin = plugin; this.log = plugin.log; } @Override public void run(CommandSender arg0, Command arg1, String arg2, String[] arg3) { //Command Syntax = /customspawners setvelocity [id] <x,y,z> //Array Index with selection 0 1 //Without selection 0 1 2 //Player Player p = null; //Entity SpawnableEntity s = null; //Vector components - int x = 0; - int y = 0; - int z = 0; + double x = 0; + double y = 0; + double z = 0; //Permissions String perm = "customspawners.entities.setvelocity"; final String COMMAND_FORMAT = ChatColor.RED + "Invalid values for velocity. 
Please use the following format: " + ChatColor.GOLD + "/entities setvelocity <x value>,<y value>,<z value>."; if(!(arg0 instanceof Player)) { log.info(NO_CONSOLE); return; } p = (Player) arg0; if(p.hasPermission(perm)) { if(CustomSpawners.entitySelection.containsKey(p) && arg3.length == 2) { s = plugin.getEntityById(CustomSpawners.entitySelection.get(p)); int firstCommaIndex = arg3[1].indexOf(","); int secondCommaIndex = arg3[1].indexOf(",", firstCommaIndex + 1); String xVal = arg3[1].substring(0, firstCommaIndex); String yVal = arg3[1].substring(firstCommaIndex + 1, secondCommaIndex); String zVal = arg3[1].substring(secondCommaIndex + 1, arg3[1].length()); - if(!plugin.isDouble(xVal) ||!plugin.isDouble(yVal) || !plugin.isDouble(zVal)) { + if(!plugin.isDouble(xVal) || !plugin.isDouble(yVal) || !plugin.isDouble(zVal)) { p.sendMessage(COMMAND_FORMAT); return; } - x = Integer.parseInt(xVal); - y = Integer.parseInt(yVal); - z = Integer.parseInt(zVal); + x = Double.parseDouble(xVal); + y = Double.parseDouble(yVal); + z = Double.parseDouble(zVal); } else if(arg3.length == 2) { p.sendMessage(NEEDS_SELECTION); return; } else if(arg3.length == 3) { int id = 0; //Check that the ID entered is a number if(!plugin.isInteger(arg3[1])) { p.sendMessage(ID_NOT_NUMBER); return; } id = Integer.parseInt(arg3[1]); //Check if the ID entered is the ID of a entity if(!plugin.isValidEntity(id)) { p.sendMessage(NO_ID); return; } s = plugin.getEntityById(id); int firstCommaIndex = arg3[2].indexOf(","); int secondCommaIndex = arg3[2].indexOf(",", firstCommaIndex + 1); String xVal = arg3[2].substring(0, firstCommaIndex); String yVal = arg3[2].substring(firstCommaIndex + 1, secondCommaIndex); String zVal = arg3[2].substring(secondCommaIndex + 1, arg3[2].length()); if(!plugin.isDouble(xVal) ||!plugin.isDouble(yVal) || !plugin.isDouble(zVal)) { p.sendMessage(COMMAND_FORMAT); return; } x = Integer.parseInt(xVal); y = Integer.parseInt(yVal); z = Integer.parseInt(zVal); } else { 
p.sendMessage(GENERAL_ERROR); return; } //Carry out command s.setVelocity(new Vector(x,y,z)); //Success p.sendMessage(ChatColor.GREEN + "Successfully set the velocity of spawnable entity with ID " + ChatColor.GOLD + s.getId() + ChatColor.GREEN + " to " + ChatColor.GOLD + "(" + x + "," + y + "," + z + ")" + ChatColor.GREEN + "!"); } else { p.sendMessage(NO_PERMISSION); return; } } }
false
true
public void run(CommandSender arg0, Command arg1, String arg2, String[] arg3) { //Command Syntax = /customspawners setvelocity [id] <x,y,z> //Array Index with selection 0 1 //Without selection 0 1 2 //Player Player p = null; //Entity SpawnableEntity s = null; //Vector components int x = 0; int y = 0; int z = 0; //Permissions String perm = "customspawners.entities.setvelocity"; final String COMMAND_FORMAT = ChatColor.RED + "Invalid values for velocity. Please use the following format: " + ChatColor.GOLD + "/entities setvelocity <x value>,<y value>,<z value>."; if(!(arg0 instanceof Player)) { log.info(NO_CONSOLE); return; } p = (Player) arg0; if(p.hasPermission(perm)) { if(CustomSpawners.entitySelection.containsKey(p) && arg3.length == 2) { s = plugin.getEntityById(CustomSpawners.entitySelection.get(p)); int firstCommaIndex = arg3[1].indexOf(","); int secondCommaIndex = arg3[1].indexOf(",", firstCommaIndex + 1); String xVal = arg3[1].substring(0, firstCommaIndex); String yVal = arg3[1].substring(firstCommaIndex + 1, secondCommaIndex); String zVal = arg3[1].substring(secondCommaIndex + 1, arg3[1].length()); if(!plugin.isDouble(xVal) ||!plugin.isDouble(yVal) || !plugin.isDouble(zVal)) { p.sendMessage(COMMAND_FORMAT); return; } x = Integer.parseInt(xVal); y = Integer.parseInt(yVal); z = Integer.parseInt(zVal); } else if(arg3.length == 2) { p.sendMessage(NEEDS_SELECTION); return; } else if(arg3.length == 3) { int id = 0; //Check that the ID entered is a number if(!plugin.isInteger(arg3[1])) { p.sendMessage(ID_NOT_NUMBER); return; } id = Integer.parseInt(arg3[1]); //Check if the ID entered is the ID of a entity if(!plugin.isValidEntity(id)) { p.sendMessage(NO_ID); return; } s = plugin.getEntityById(id); int firstCommaIndex = arg3[2].indexOf(","); int secondCommaIndex = arg3[2].indexOf(",", firstCommaIndex + 1); String xVal = arg3[2].substring(0, firstCommaIndex); String yVal = arg3[2].substring(firstCommaIndex + 1, secondCommaIndex); String zVal = 
arg3[2].substring(secondCommaIndex + 1, arg3[2].length()); if(!plugin.isDouble(xVal) ||!plugin.isDouble(yVal) || !plugin.isDouble(zVal)) { p.sendMessage(COMMAND_FORMAT); return; } x = Integer.parseInt(xVal); y = Integer.parseInt(yVal); z = Integer.parseInt(zVal); } else { p.sendMessage(GENERAL_ERROR); return; } //Carry out command s.setVelocity(new Vector(x,y,z)); //Success p.sendMessage(ChatColor.GREEN + "Successfully set the velocity of spawnable entity with ID " + ChatColor.GOLD + s.getId() + ChatColor.GREEN + " to " + ChatColor.GOLD + "(" + x + "," + y + "," + z + ")" + ChatColor.GREEN + "!"); } else { p.sendMessage(NO_PERMISSION); return; } }
public void run(CommandSender arg0, Command arg1, String arg2, String[] arg3) { //Command Syntax = /customspawners setvelocity [id] <x,y,z> //Array Index with selection 0 1 //Without selection 0 1 2 //Player Player p = null; //Entity SpawnableEntity s = null; //Vector components double x = 0; double y = 0; double z = 0; //Permissions String perm = "customspawners.entities.setvelocity"; final String COMMAND_FORMAT = ChatColor.RED + "Invalid values for velocity. Please use the following format: " + ChatColor.GOLD + "/entities setvelocity <x value>,<y value>,<z value>."; if(!(arg0 instanceof Player)) { log.info(NO_CONSOLE); return; } p = (Player) arg0; if(p.hasPermission(perm)) { if(CustomSpawners.entitySelection.containsKey(p) && arg3.length == 2) { s = plugin.getEntityById(CustomSpawners.entitySelection.get(p)); int firstCommaIndex = arg3[1].indexOf(","); int secondCommaIndex = arg3[1].indexOf(",", firstCommaIndex + 1); String xVal = arg3[1].substring(0, firstCommaIndex); String yVal = arg3[1].substring(firstCommaIndex + 1, secondCommaIndex); String zVal = arg3[1].substring(secondCommaIndex + 1, arg3[1].length()); if(!plugin.isDouble(xVal) || !plugin.isDouble(yVal) || !plugin.isDouble(zVal)) { p.sendMessage(COMMAND_FORMAT); return; } x = Double.parseDouble(xVal); y = Double.parseDouble(yVal); z = Double.parseDouble(zVal); } else if(arg3.length == 2) { p.sendMessage(NEEDS_SELECTION); return; } else if(arg3.length == 3) { int id = 0; //Check that the ID entered is a number if(!plugin.isInteger(arg3[1])) { p.sendMessage(ID_NOT_NUMBER); return; } id = Integer.parseInt(arg3[1]); //Check if the ID entered is the ID of a entity if(!plugin.isValidEntity(id)) { p.sendMessage(NO_ID); return; } s = plugin.getEntityById(id); int firstCommaIndex = arg3[2].indexOf(","); int secondCommaIndex = arg3[2].indexOf(",", firstCommaIndex + 1); String xVal = arg3[2].substring(0, firstCommaIndex); String yVal = arg3[2].substring(firstCommaIndex + 1, secondCommaIndex); String zVal = 
arg3[2].substring(secondCommaIndex + 1, arg3[2].length()); if(!plugin.isDouble(xVal) ||!plugin.isDouble(yVal) || !plugin.isDouble(zVal)) { p.sendMessage(COMMAND_FORMAT); return; } x = Integer.parseInt(xVal); y = Integer.parseInt(yVal); z = Integer.parseInt(zVal); } else { p.sendMessage(GENERAL_ERROR); return; } //Carry out command s.setVelocity(new Vector(x,y,z)); //Success p.sendMessage(ChatColor.GREEN + "Successfully set the velocity of spawnable entity with ID " + ChatColor.GOLD + s.getId() + ChatColor.GREEN + " to " + ChatColor.GOLD + "(" + x + "," + y + "," + z + ")" + ChatColor.GREEN + "!"); } else { p.sendMessage(NO_PERMISSION); return; } }
diff --git a/src/main/java/cn/uc/udac/bolts/BoltUsrLastUrl.java b/src/main/java/cn/uc/udac/bolts/BoltUsrLastUrl.java index f42ed86..340bacd 100644 --- a/src/main/java/cn/uc/udac/bolts/BoltUsrLastUrl.java +++ b/src/main/java/cn/uc/udac/bolts/BoltUsrLastUrl.java @@ -1,112 +1,112 @@ /* * BoltUsrLastUrl * * 1.0 记录usr最近访问的url * * [email protected] */ package cn.uc.udac.bolts; import java.net.URL; import java.text.SimpleDateFormat; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import redis.clients.jedis.Jedis; import backtype.storm.task.OutputCollector; import backtype.storm.task.TopologyContext; import backtype.storm.topology.BasicOutputCollector; import backtype.storm.topology.OutputFieldsDeclarer; import backtype.storm.topology.base.BaseBasicBolt; import backtype.storm.topology.base.BaseRichBolt; import backtype.storm.tuple.Fields; import backtype.storm.tuple.Tuple; import backtype.storm.tuple.Values; public class BoltUsrLastUrl extends BaseBasicBolt { static public Logger LOG = Logger.getLogger(BoltUsrLastUrl.class); private Map _conf; private Set<String> _newsSites; private Jedis[] _arrRedisUsrLastUrl; private int _count = 0; private void init(Map conf) { try { List<String> hosts = (List<String>)conf.get("usr_last_url_redis_hosts"); int port = ( (Long)conf.get("redis_port") ).intValue(); _arrRedisUsrLastUrl = new Jedis[hosts.size()]; for (int i = 0; i < hosts.size(); ++i) { _arrRedisUsrLastUrl[i] = new Jedis(hosts.get(i), port); } List<String> sites = (List<String>)conf.get("news_sites"); _newsSites = new HashSet<String>(sites); } catch (Exception e) { LOG.info("BoltUsrLastUrl.init.exception:", e); } } @Override public void prepare(Map conf, TopologyContext context) { _conf = conf; init(_conf); } private int hash(String key, int size) { int h = 0; for (int i = 0; i < key.length(); ++i) { h += key.codePointAt(i); } return h 
% size; } @Override public void execute(Tuple input, BasicOutputCollector collector) { try { String time = input.getString(0); String usr = input.getString(3); String url = input.getString(5); String site = new URL(url).getHost(); if (!_newsSites.contains(site)) return; String key = "UsrLastUrl`" + usr; int h = hash(key, _arrRedisUsrLastUrl.length); int seconds = 24 * 3600; String refer = _arrRedisUsrLastUrl[h].get(key); if (++_count % 1000 == 0) { LOG.info(String.format("BoltUsrLastUrl %d: time=%s, usr=%s, url=%s, refer=%s, key=%s", _count, time, usr, url, refer, key)); } if (refer != null && !url.equals(refer)) { collector.emit(new Values(time, refer, url)); } - _arrRedisUsrLastUrl[h].rpush(key, url); + _arrRedisUsrLastUrl[h].set(key, url); _arrRedisUsrLastUrl[h].expire(key, seconds); } catch (Exception e) { LOG.info("BoltUsrLastUrl.execute.exception:", e); init(_conf); } } @Override public void declareOutputFields(OutputFieldsDeclarer declarer) { declarer.declare(new Fields("time", "refer", "url")); } }
true
true
public void execute(Tuple input, BasicOutputCollector collector) { try { String time = input.getString(0); String usr = input.getString(3); String url = input.getString(5); String site = new URL(url).getHost(); if (!_newsSites.contains(site)) return; String key = "UsrLastUrl`" + usr; int h = hash(key, _arrRedisUsrLastUrl.length); int seconds = 24 * 3600; String refer = _arrRedisUsrLastUrl[h].get(key); if (++_count % 1000 == 0) { LOG.info(String.format("BoltUsrLastUrl %d: time=%s, usr=%s, url=%s, refer=%s, key=%s", _count, time, usr, url, refer, key)); } if (refer != null && !url.equals(refer)) { collector.emit(new Values(time, refer, url)); } _arrRedisUsrLastUrl[h].rpush(key, url); _arrRedisUsrLastUrl[h].expire(key, seconds); } catch (Exception e) { LOG.info("BoltUsrLastUrl.execute.exception:", e); init(_conf); } }
public void execute(Tuple input, BasicOutputCollector collector) { try { String time = input.getString(0); String usr = input.getString(3); String url = input.getString(5); String site = new URL(url).getHost(); if (!_newsSites.contains(site)) return; String key = "UsrLastUrl`" + usr; int h = hash(key, _arrRedisUsrLastUrl.length); int seconds = 24 * 3600; String refer = _arrRedisUsrLastUrl[h].get(key); if (++_count % 1000 == 0) { LOG.info(String.format("BoltUsrLastUrl %d: time=%s, usr=%s, url=%s, refer=%s, key=%s", _count, time, usr, url, refer, key)); } if (refer != null && !url.equals(refer)) { collector.emit(new Values(time, refer, url)); } _arrRedisUsrLastUrl[h].set(key, url); _arrRedisUsrLastUrl[h].expire(key, seconds); } catch (Exception e) { LOG.info("BoltUsrLastUrl.execute.exception:", e); init(_conf); } }
diff --git a/hot-deploy/opentaps-common/src/common/org/opentaps/common/util/UtilDate.java b/hot-deploy/opentaps-common/src/common/org/opentaps/common/util/UtilDate.java index fd26eed36..86ab78502 100644 --- a/hot-deploy/opentaps-common/src/common/org/opentaps/common/util/UtilDate.java +++ b/hot-deploy/opentaps-common/src/common/org/opentaps/common/util/UtilDate.java @@ -1,512 +1,512 @@ /* * Copyright (c) 2006 - 2009 Open Source Strategies, Inc. * * This program is free software; you can redistribute it and/or modify * it under the terms of the Honest Public License. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * Honest Public License for more details. * * You should have received a copy of the Honest Public License * along with this program; if not, write to Funambol, * 643 Bair Island Road, Suite 305 - Redwood City, CA 94063, USA */ package org.opentaps.common.util; import java.sql.Timestamp; import java.text.DateFormat; import java.text.ParseException; import java.text.ParsePosition; import java.text.SimpleDateFormat; import java.util.Arrays; import java.util.Calendar; import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.StringTokenizer; import java.util.TimeZone; import javolution.util.FastMap; import org.apache.commons.validator.routines.CalendarValidator; import org.ofbiz.base.util.Debug; import org.ofbiz.base.util.UtilDateTime; import org.ofbiz.base.util.UtilValidate; /** * UtilDate - A place for date helper methods. */ public abstract class UtilDate { private static final String MODULE = UtilDate.class.getName(); // Utility class should not be instantiated. private UtilDate() { } /** Number of milliseconds in a day. 
*/ private static final long MS_IN_A_DAY = 24 * 60 * 60 * 1000; /** * JDBC escape format for java.sql.Date conversions. */ public static final String DATE_FORMAT = "yyyy-MM-dd"; /** * JDBC escape format for java.sql.Time conversions. */ public static final String TIME_FORMAT = "HH:mm:ss"; /** * Default pattern that <code>getJsDateTimeFormat</code> can return in case of error or * if given pattern element isn't supported by jscalendar . */ private final static String fallBackJSPattern = "%Y-%m-%d %H:%M:%S.0"; /** * Parses a timestamp into fields suitable for selection of default values for AM/PM based form widgets. * * Please note that this routine can also take a time-only or date-only string (in the localized format), * under which case the returned date field will be set to null; this can be useful * for setting a default time, without having to specify a default date. * * @param timestamp a <code>String</code> representing a timestamp * @param timeZone the <code>TimeZone</code> to use for conversion * @param locale the <code>Locale</code> to use for conversion * @return a <code>Map</code> containing the parsed values for "date", "hour", "minute" and "ampm", empty <code>Map</code> if there was a problem */ public static Map<String, Object> timestampToAmPm(String timestamp, TimeZone timeZone, Locale locale) { Map<String, Object> map = FastMap.newInstance(); int hour = 12; int minute = 0; String ampm = "AM"; String date = null; boolean hasDate = false; boolean hasTime = false; if (UtilValidate.isEmpty(timestamp)) { return map; } try { // There are no robust algorithm to split date/time in unconditioned format. // Let's try convert timestamp using different patterns and see at result. 
ParsePosition pos = new ParsePosition(0); SimpleDateFormat df = new SimpleDateFormat(getTimeFormat(locale), locale); //df.setTimeZone(timeZone); Date dateObj = df.parse(timestamp, pos); if (dateObj != null) { hasTime = true; } if (dateObj == null) { pos.setIndex(0); String dateTimeFormat = getDateTimeFormat(locale); df = new SimpleDateFormat(dateTimeFormat, locale); //df.setTimeZone(timeZone); dateObj = df.parse(timestamp, pos); if (dateObj != null && UtilDate.isDateTime(timestamp, dateTimeFormat, locale, timeZone)) { hasTime = true; hasDate = true; } } if (dateObj == null) { pos.setIndex(0); String dateFormat = getDateFormat(locale); df = new SimpleDateFormat(dateFormat, locale); //df.setTimeZone(timeZone); dateObj = df.parse(timestamp, pos); if (dateObj != null && UtilDate.isDateTime(timestamp, dateFormat, locale, timeZone)) { hasDate = true; } } if (dateObj == null) { return map; } Calendar calendar = Calendar.getInstance(/*timeZone,*/ locale); calendar.setTime(dateObj); if (hasDate) { df = new SimpleDateFormat(getDateFormat(locale), locale); //df.setTimeZone(timeZone); date = df.format(calendar.getTime()); } if (hasTime) { hour = calendar.get(Calendar.HOUR); if (hour == 0) { hour = 12; } minute = calendar.get(Calendar.MINUTE); if (calendar.get(Calendar.AM_PM) == Calendar.PM) { ampm = "PM"; } } } catch (IllegalArgumentException iae) { Debug.logWarning(iae.getLocalizedMessage(), MODULE); return map; } if (UtilValidate.isNotEmpty(date)) { map.put("date", date); } map.put("hour", new Integer(hour)); map.put("ampm", ampm); map.put("minute", new Integer(minute)); return map; } /** * Converts a <code>String</code> into a <code>Timestamp</code> value. 
* @param timestampString a timestamp <code>String</code> in JDBC timestamp escape (yyyy-MM-dd hh:mm:ss.fff) or ISO standard (yyyy-MM-dd) format * @param timeZone the <code>TimeZone</code> to use for conversion * @param locale the <code>Locale</code> to use for conversion * @return the <code>Timestamp</code> corresponding to the given <code>String</code> */ public static Timestamp toTimestamp(String timestampString, TimeZone timeZone, Locale locale) { String dateFormat = null; if (UtilValidate.isEmpty(timestampString)) { return null; } if (timestampString.matches("^\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}\\.\\d+$")) { // JDBC format dateFormat = "yyyy-MM-dd HH:mm:ss.S"; } else if (timestampString.matches("^\\d{4}-\\d{2}-\\d{2}$")) { // ISO format dateFormat = "yyyy-MM-dd"; } else if (timestampString.indexOf(" ") != -1) { // Date and time in localized format dateFormat = getDateTimeFormat(locale); } else { // date in localized format dateFormat = getDateFormat(locale); } if (!UtilDate.isDateTime(timestampString, dateFormat, locale)) { // timestampString doesn't match pattern return null; } Date parsedDate = null; try { DateFormat df = UtilDateTime.toDateTimeFormat(dateFormat, timeZone, locale); parsedDate = df.parse(timestampString); } catch (ParseException e) { return null; } return new Timestamp(parsedDate.getTime()); } /** * Returns the number of days between the beginning of two days. * This value is always positive. 
* @param one first <code>Timestamp</code> value * @param two second <code>Timestamp</code> value * @return the absolute value of the number of days between the two given <code>Timestamp</code> */ public static Integer dateDifference(Timestamp one, Timestamp two) { Calendar first = Calendar.getInstance(); Calendar second = Calendar.getInstance(); first.setTime(one); second.setTime(two); // set to the beginning of the day first.set(Calendar.HOUR_OF_DAY, 0); first.set(Calendar.MINUTE, 0); first.set(Calendar.SECOND, 0); second.set(Calendar.HOUR_OF_DAY, 0); second.set(Calendar.MINUTE, 0); second.set(Calendar.SECOND, 0); double msdiff = first.getTimeInMillis() - second.getTimeInMillis(); long days = Math.round(msdiff / MS_IN_A_DAY); return new Integer((int) Math.abs(days)); } /** * Returns appropriate time format string. * @deprecated was removed from ofbiz, re-added for backward compatibility * @param locale User's locale, may be <code>null</code> * @return Time format string */ @Deprecated public static String getTimeFormat(Locale locale) { if (locale == null) { locale = Locale.getDefault(); } int timeStyle = -1; if (TIME_FORMAT == null || "DEFAULT".equals(TIME_FORMAT) || "SHORT".equals(TIME_FORMAT)) { timeStyle = DateFormat.SHORT; } else if ("MEDIUM".equals(TIME_FORMAT)) { timeStyle = DateFormat.MEDIUM; } else if ("LONG".equals(TIME_FORMAT)) { timeStyle = DateFormat.LONG; } else { return TIME_FORMAT; } SimpleDateFormat df = (SimpleDateFormat) SimpleDateFormat.getTimeInstance(timeStyle, locale); return df.toPattern(); } /** * Returns appropriate date + time format string. * @deprecated was removed from ofbiz, re-added for backward compatibility * @param locale User's locale, may be <code>null</code>. 
* @return Date/time format string */ @Deprecated public static String getDateTimeFormat(Locale locale) { if (locale == null) { locale = Locale.getDefault(); } int dateStyle = -1; if (DATE_FORMAT == null || "DEFAULT".equals(DATE_FORMAT) || "SHORT".equals(DATE_FORMAT)) { dateStyle = DateFormat.SHORT; } else if ("MEDIUM".equals(DATE_FORMAT)) { dateStyle = DateFormat.MEDIUM; } else if ("LONG".equals(DATE_FORMAT)) { dateStyle = DateFormat.LONG; } int timeStyle = -1; if (TIME_FORMAT == null || "DEFAULT".equals(TIME_FORMAT) || "SHORT".equals(TIME_FORMAT)) { timeStyle = DateFormat.SHORT; } else if ("MEDIUM".equals(TIME_FORMAT)) { timeStyle = DateFormat.MEDIUM; } else if ("LONG".equals(TIME_FORMAT)) { timeStyle = DateFormat.LONG; } if (dateStyle >= 0 && timeStyle >= 0) { SimpleDateFormat df = (SimpleDateFormat) SimpleDateFormat.getDateTimeInstance(dateStyle, timeStyle, locale); return df.toPattern(); } if (dateStyle >= 0 && timeStyle == -1) { SimpleDateFormat df = (SimpleDateFormat) SimpleDateFormat.getDateInstance(dateStyle, locale); return (df.toPattern() + " " + TIME_FORMAT); } if (dateStyle == -1 && timeStyle == -1) { return DATE_FORMAT + " " + TIME_FORMAT; } SimpleDateFormat df = (SimpleDateFormat) SimpleDateFormat.getTimeInstance(timeStyle, locale); return DATE_FORMAT + " " + df.toPattern(); } /** * Returns appropriate date format string. 
* @deprecated was removed from ofbiz, re-added for backward compatibility * @param locale User's locale, may be <code>null</code> * @return Date format string */ @Deprecated public static String getDateFormat(Locale locale) { if (locale == null) { locale = Locale.getDefault(); } int dateStyle = -1; if (DATE_FORMAT == null || "DEFAULT".equals(DATE_FORMAT) || "SHORT".equals(DATE_FORMAT)) { dateStyle = DateFormat.SHORT; } else if ("MEDIUM".equals(DATE_FORMAT)) { dateStyle = DateFormat.MEDIUM; } else if ("LONG".equals(DATE_FORMAT)) { dateStyle = DateFormat.LONG; } else { return DATE_FORMAT; } SimpleDateFormat df = (SimpleDateFormat) SimpleDateFormat.getDateInstance(dateStyle, locale); return df.toPattern(); } /** * Returns a Calendar object initialized to the specified date/time, time zone, * and locale. * * @param date date/time to use * @param timeZone the timezone, optional, will use the default timezone if <code>null</code> * @param locale the locale, optional, will use the default locale if <code>null</code> * @return Calendar object * @see java.util.Calendar */ public static Calendar toCalendar(Date date, TimeZone timeZone, Locale locale) { if (timeZone == null) { timeZone = TimeZone.getDefault(); Debug.logWarning("Null timeZone, using default: " + timeZone, MODULE); } if (locale == null) { locale = Locale.getDefault(); Debug.logWarning("Null locale, using default: " + locale, MODULE); } Calendar cal = Calendar.getInstance(timeZone, locale); if (date != null) { cal.setTime(date); } return cal; } /** * Method converts given date/time pattern in SimpleDateFormat style to form that can be used by * jscalendar.<br>Called from FTL and form widget rendering code for setup calendar. * * @param pattern Pattern to convert. Results of <code>getDate[Time]Format(locale)</code> as a rule. * @return Date/time format pattern that conforms to <b>jscalendar</b> requirements. 
*/ public static String getJsDateTimeFormat(String pattern) { if (UtilValidate.isEmpty(pattern)) { - throw new IllegalArgumentException("UtilDateTime.getJsDateTimeFormat: Pattern string can't be empty."); + throw new IllegalArgumentException("UtilDate.getJsDateTimeFormat: Pattern string can't be empty."); } /* * The table contains translation rules. * Column number equals to placeholder length. * For example: * Row {"%m", "%m", "%b", "%B"}, // M (Month) * represents how we should translate following patterns * "M" -> "%m", "MM" -> "%m", "MMM" -> "%b", "MMMM" -> "%B" * * Translation inpissible if array element equals to null. * This means usualy that jscalendar has no equivalent for some Java * pattern symbol and method returns fallBackJSPattern constant. */ final String[][] translationTable = { {null, null, null, null}, // G (Era designator) {null, "%y", "%Y", "%Y"}, // y (Year) {"%m", "%m", "%b", "%B"}, // M (Month) {"%e", "%d", "%d", "%d"}, // d (Day in month) {null, null, null, null}, // k (Hour in day 1-24) {"%k", "%H", "%H", "%H"}, // H (Hour in day 0-23) {"%M", "%M", "%M", "%M"}, // m (Minute in hour) {"%S", "%S", "%S", "%S"}, // s (Second in minute) {null, null, null, null}, // S (Millisecond) {"%a", "%a", "%a", "%A"}, // E (Day in week) {"%j", "%j", "%j", "%j"}, // D (Day in year) {"%w", "%w", "%w", "%w"}, // F (Day of week in month) {"%W", "%W", "%W", "%W"}, // w (Week in year) {null, null, null, null}, // W (Week in month) {"%p", "%p", "%p", "%p"}, // a (Am/pm marker) {"%l", "%I", null, null}, // h (Hour in am/pm 1-12) {null, null, null, null}, // K (Hour in am/pm 0-11) {null, null, null, null}, // z (Time zone) {null, null, null, null} // Z (Time zone/RFC-822) }; String javaDateFormat = pattern; /* Unlocalized date/time pattern characters. */ final String patternChars = "GyMdkHmsSEDFwWahKzZ"; // all others chars in source string are separators between fields. 
List<String> tokens = Arrays.asList(javaDateFormat.split("[" + patternChars + "]")); String separators = ""; Iterator<String> iterator = tokens.iterator(); while (iterator.hasNext()) { String token = iterator.next(); if (UtilValidate.isNotEmpty(token) && separators.indexOf(token) == -1) { separators += token; } } // Going over pattern elements and replace it by those in translation table StringBuffer jsDateFormat = new StringBuffer(); StringTokenizer tokenizer = new StringTokenizer(javaDateFormat, separators, true); while (tokenizer.hasMoreTokens()) { String token = tokenizer.nextToken(); if (UtilValidate.isEmpty(token)) { continue; } int index = patternChars.indexOf(token.charAt(0)); if (index == -1) { // token is fixed part of pattern jsDateFormat.append(token); continue; } String jsPlaceholder = null; try { // token is placeholder that we should replce by equivalent from table jsPlaceholder = translationTable[index][token.length() - 1]; } catch (IndexOutOfBoundsException e) { // specified Java pattern have some placeholder with length grater than supported Debug.logError(e, "Wrong placeholder [" + token + "] in date/time pattern. Probably too long, maximum 4 chars allowed.", MODULE); return fallBackJSPattern; } if (UtilValidate.isEmpty(jsPlaceholder)) { //Ouch! jscalendar doesn't support milliseconds but some parts of framework // require it. Just replace miiseconds with zero symbol. if (token.startsWith("S")) { jsDateFormat.append("0"); continue; } // Source pattern contains something that we can't translate. Return fallback pattern. Debug.logError("Translation of date/time pattern [" + javaDateFormat + "] to jscalendar format is failed as jscalendar doesn't support placeholder [" + token + "]. Returns fallback pattern " + fallBackJSPattern, MODULE); return fallBackJSPattern; } // add new element to target pattern jsDateFormat.append(jsPlaceholder); } return jsDateFormat.toString(); } /** * Verify if date/time string match pattern and is valid. 
* @param value a <code>String</code> value * @param pattern a <code>String</code> value * @param locale a <code>Locale</code> value * @param timeZone a <code>TimeZone</code> value * @return a <code>boolean</code> value */ public static boolean isDateTime(String value, String pattern, Locale locale, TimeZone timeZone) { CalendarValidator validator = new CalendarValidator(); if (timeZone == null) { return (validator.validate(value, pattern, locale) != null); } return (validator.validate(value, pattern, locale, timeZone) != null); } /** * Verify if date/time string match pattern and is valid. * @param value a <code>String</code> value * @param pattern a <code>String</code> value * @param locale a <code>Locale</code> value * @return a <code>boolean</code> value */ public static boolean isDateTime(String value, String pattern, Locale locale) { return isDateTime(value, pattern, locale, null); } /** * Verify either date/time string conforms timestamp pattern. * @param value a <code>String</code> value * @return a <code>boolean</code> value */ public static boolean isTimestamp(String value) { if (value.length() == 10) { return isDateTime(value, "yyyy-MM-dd", Locale.getDefault()); } else { return isDateTime(value, "yyyy-MM-dd HH:mm:ss.S", Locale.getDefault()); } } }
true
true
public static String getJsDateTimeFormat(String pattern) { if (UtilValidate.isEmpty(pattern)) { throw new IllegalArgumentException("UtilDateTime.getJsDateTimeFormat: Pattern string can't be empty."); } /* * The table contains translation rules. * Column number equals to placeholder length. * For example: * Row {"%m", "%m", "%b", "%B"}, // M (Month) * represents how we should translate following patterns * "M" -> "%m", "MM" -> "%m", "MMM" -> "%b", "MMMM" -> "%B" * * Translation inpissible if array element equals to null. * This means usualy that jscalendar has no equivalent for some Java * pattern symbol and method returns fallBackJSPattern constant. */ final String[][] translationTable = { {null, null, null, null}, // G (Era designator) {null, "%y", "%Y", "%Y"}, // y (Year) {"%m", "%m", "%b", "%B"}, // M (Month) {"%e", "%d", "%d", "%d"}, // d (Day in month) {null, null, null, null}, // k (Hour in day 1-24) {"%k", "%H", "%H", "%H"}, // H (Hour in day 0-23) {"%M", "%M", "%M", "%M"}, // m (Minute in hour) {"%S", "%S", "%S", "%S"}, // s (Second in minute) {null, null, null, null}, // S (Millisecond) {"%a", "%a", "%a", "%A"}, // E (Day in week) {"%j", "%j", "%j", "%j"}, // D (Day in year) {"%w", "%w", "%w", "%w"}, // F (Day of week in month) {"%W", "%W", "%W", "%W"}, // w (Week in year) {null, null, null, null}, // W (Week in month) {"%p", "%p", "%p", "%p"}, // a (Am/pm marker) {"%l", "%I", null, null}, // h (Hour in am/pm 1-12) {null, null, null, null}, // K (Hour in am/pm 0-11) {null, null, null, null}, // z (Time zone) {null, null, null, null} // Z (Time zone/RFC-822) }; String javaDateFormat = pattern; /* Unlocalized date/time pattern characters. */ final String patternChars = "GyMdkHmsSEDFwWahKzZ"; // all others chars in source string are separators between fields. 
List<String> tokens = Arrays.asList(javaDateFormat.split("[" + patternChars + "]")); String separators = ""; Iterator<String> iterator = tokens.iterator(); while (iterator.hasNext()) { String token = iterator.next(); if (UtilValidate.isNotEmpty(token) && separators.indexOf(token) == -1) { separators += token; } } // Going over pattern elements and replace it by those in translation table StringBuffer jsDateFormat = new StringBuffer(); StringTokenizer tokenizer = new StringTokenizer(javaDateFormat, separators, true); while (tokenizer.hasMoreTokens()) { String token = tokenizer.nextToken(); if (UtilValidate.isEmpty(token)) { continue; } int index = patternChars.indexOf(token.charAt(0)); if (index == -1) { // token is fixed part of pattern jsDateFormat.append(token); continue; } String jsPlaceholder = null; try { // token is placeholder that we should replce by equivalent from table jsPlaceholder = translationTable[index][token.length() - 1]; } catch (IndexOutOfBoundsException e) { // specified Java pattern have some placeholder with length grater than supported Debug.logError(e, "Wrong placeholder [" + token + "] in date/time pattern. Probably too long, maximum 4 chars allowed.", MODULE); return fallBackJSPattern; } if (UtilValidate.isEmpty(jsPlaceholder)) { //Ouch! jscalendar doesn't support milliseconds but some parts of framework // require it. Just replace miiseconds with zero symbol. if (token.startsWith("S")) { jsDateFormat.append("0"); continue; } // Source pattern contains something that we can't translate. Return fallback pattern. Debug.logError("Translation of date/time pattern [" + javaDateFormat + "] to jscalendar format is failed as jscalendar doesn't support placeholder [" + token + "]. Returns fallback pattern " + fallBackJSPattern, MODULE); return fallBackJSPattern; } // add new element to target pattern jsDateFormat.append(jsPlaceholder); } return jsDateFormat.toString(); }
public static String getJsDateTimeFormat(String pattern) { if (UtilValidate.isEmpty(pattern)) { throw new IllegalArgumentException("UtilDate.getJsDateTimeFormat: Pattern string can't be empty."); } /* * The table contains translation rules. * Column number equals to placeholder length. * For example: * Row {"%m", "%m", "%b", "%B"}, // M (Month) * represents how we should translate following patterns * "M" -> "%m", "MM" -> "%m", "MMM" -> "%b", "MMMM" -> "%B" * * Translation inpissible if array element equals to null. * This means usualy that jscalendar has no equivalent for some Java * pattern symbol and method returns fallBackJSPattern constant. */ final String[][] translationTable = { {null, null, null, null}, // G (Era designator) {null, "%y", "%Y", "%Y"}, // y (Year) {"%m", "%m", "%b", "%B"}, // M (Month) {"%e", "%d", "%d", "%d"}, // d (Day in month) {null, null, null, null}, // k (Hour in day 1-24) {"%k", "%H", "%H", "%H"}, // H (Hour in day 0-23) {"%M", "%M", "%M", "%M"}, // m (Minute in hour) {"%S", "%S", "%S", "%S"}, // s (Second in minute) {null, null, null, null}, // S (Millisecond) {"%a", "%a", "%a", "%A"}, // E (Day in week) {"%j", "%j", "%j", "%j"}, // D (Day in year) {"%w", "%w", "%w", "%w"}, // F (Day of week in month) {"%W", "%W", "%W", "%W"}, // w (Week in year) {null, null, null, null}, // W (Week in month) {"%p", "%p", "%p", "%p"}, // a (Am/pm marker) {"%l", "%I", null, null}, // h (Hour in am/pm 1-12) {null, null, null, null}, // K (Hour in am/pm 0-11) {null, null, null, null}, // z (Time zone) {null, null, null, null} // Z (Time zone/RFC-822) }; String javaDateFormat = pattern; /* Unlocalized date/time pattern characters. */ final String patternChars = "GyMdkHmsSEDFwWahKzZ"; // all others chars in source string are separators between fields. 
List<String> tokens = Arrays.asList(javaDateFormat.split("[" + patternChars + "]")); String separators = ""; Iterator<String> iterator = tokens.iterator(); while (iterator.hasNext()) { String token = iterator.next(); if (UtilValidate.isNotEmpty(token) && separators.indexOf(token) == -1) { separators += token; } } // Going over pattern elements and replace it by those in translation table StringBuffer jsDateFormat = new StringBuffer(); StringTokenizer tokenizer = new StringTokenizer(javaDateFormat, separators, true); while (tokenizer.hasMoreTokens()) { String token = tokenizer.nextToken(); if (UtilValidate.isEmpty(token)) { continue; } int index = patternChars.indexOf(token.charAt(0)); if (index == -1) { // token is fixed part of pattern jsDateFormat.append(token); continue; } String jsPlaceholder = null; try { // token is placeholder that we should replce by equivalent from table jsPlaceholder = translationTable[index][token.length() - 1]; } catch (IndexOutOfBoundsException e) { // specified Java pattern have some placeholder with length grater than supported Debug.logError(e, "Wrong placeholder [" + token + "] in date/time pattern. Probably too long, maximum 4 chars allowed.", MODULE); return fallBackJSPattern; } if (UtilValidate.isEmpty(jsPlaceholder)) { //Ouch! jscalendar doesn't support milliseconds but some parts of framework // require it. Just replace miiseconds with zero symbol. if (token.startsWith("S")) { jsDateFormat.append("0"); continue; } // Source pattern contains something that we can't translate. Return fallback pattern. Debug.logError("Translation of date/time pattern [" + javaDateFormat + "] to jscalendar format is failed as jscalendar doesn't support placeholder [" + token + "]. Returns fallback pattern " + fallBackJSPattern, MODULE); return fallBackJSPattern; } // add new element to target pattern jsDateFormat.append(jsPlaceholder); } return jsDateFormat.toString(); }
diff --git a/src/main/java/com/alexrnl/subtitlecorrector/io/subrip/SubRipWriter.java b/src/main/java/com/alexrnl/subtitlecorrector/io/subrip/SubRipWriter.java index cfa82d7..bc173b7 100644 --- a/src/main/java/com/alexrnl/subtitlecorrector/io/subrip/SubRipWriter.java +++ b/src/main/java/com/alexrnl/subtitlecorrector/io/subrip/SubRipWriter.java @@ -1,74 +1,75 @@ package com.alexrnl.subtitlecorrector.io.subrip; import java.io.BufferedWriter; import java.io.IOException; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.text.SimpleDateFormat; import java.util.logging.Level; import java.util.logging.Logger; import com.alexrnl.subtitlecorrector.common.Subtitle; import com.alexrnl.subtitlecorrector.common.SubtitleFile; import com.alexrnl.subtitlecorrector.io.SubtitleWriter; /** * Writer for the SubRip format.<br /> * @author Alex */ public class SubRipWriter extends SubtitleWriter { /** Logger */ private static Logger lg = Logger.getLogger(SubRipWriter.class.getName()); /** The date formatter */ private final SimpleDateFormat dateFormat; /** The subtitle counter (required by SubRip format) */ private Integer subtitleCounter; /** * Constructor #1.<br /> * Default constructor, uses UTF-8 for writing the file. */ public SubRipWriter () { this(StandardCharsets.UTF_8); } /** * Constructor #2.<br /> * @param charSet * the character to use for this writer. 
*/ public SubRipWriter (final Charset charSet) { super(charSet); dateFormat = new SimpleDateFormat(SubRip.SUBRIP_DATE_FORMAT); subtitleCounter = null; } @Override protected void writeHeader (final SubtitleFile file, final BufferedWriter writer) throws IOException { if (subtitleCounter != null) { throw new IllegalStateException("It seems that the previous file was not finished fully writen"); } subtitleCounter = 0; } @Override protected void writeFooter (final SubtitleFile file, final BufferedWriter writer) throws IOException { if (lg.isLoggable(Level.INFO)) { lg.info("Successfully writen " + subtitleCounter + " subtitles"); } subtitleCounter = null; } @Override protected void writeSubtitle (final Subtitle subtitle, final BufferedWriter writer) throws IOException { - writer.write(Integer.valueOf(++subtitleCounter).toString()); + subtitleCounter++; + writer.write(subtitleCounter.toString()); writer.write(System.lineSeparator()); writer.write(dateFormat.format(subtitle.getBegin())); writer.write(SubRip.SUBRIP_DATE_SEPARATOR); writer.write(dateFormat.format(subtitle.getEnd())); writer.write(System.lineSeparator()); writer.write(subtitle.getContent()); writer.write(System.lineSeparator()); } }
true
true
protected void writeSubtitle (final Subtitle subtitle, final BufferedWriter writer) throws IOException { writer.write(Integer.valueOf(++subtitleCounter).toString()); writer.write(System.lineSeparator()); writer.write(dateFormat.format(subtitle.getBegin())); writer.write(SubRip.SUBRIP_DATE_SEPARATOR); writer.write(dateFormat.format(subtitle.getEnd())); writer.write(System.lineSeparator()); writer.write(subtitle.getContent()); writer.write(System.lineSeparator()); }
protected void writeSubtitle (final Subtitle subtitle, final BufferedWriter writer) throws IOException { subtitleCounter++; writer.write(subtitleCounter.toString()); writer.write(System.lineSeparator()); writer.write(dateFormat.format(subtitle.getBegin())); writer.write(SubRip.SUBRIP_DATE_SEPARATOR); writer.write(dateFormat.format(subtitle.getEnd())); writer.write(System.lineSeparator()); writer.write(subtitle.getContent()); writer.write(System.lineSeparator()); }
diff --git a/viewmodel-impl/impl/src/main/java/org/cytoscape/view/model/internal/CyNetworkViewManagerImpl.java b/viewmodel-impl/impl/src/main/java/org/cytoscape/view/model/internal/CyNetworkViewManagerImpl.java index d6cc6facb..a5b1462a0 100644 --- a/viewmodel-impl/impl/src/main/java/org/cytoscape/view/model/internal/CyNetworkViewManagerImpl.java +++ b/viewmodel-impl/impl/src/main/java/org/cytoscape/view/model/internal/CyNetworkViewManagerImpl.java @@ -1,140 +1,140 @@ /* File: CyNetworkViewManagerImpl.java Copyright (c) 2006, 2010-2011, The Cytoscape Consortium (www.cytoscape.org) This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY, WITHOUT EVEN THE IMPLIED WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. The software and documentation provided hereunder is on an "as is" basis, and the Institute for Systems Biology and the Whitehead Institute have no obligations to provide maintenance, support, updates, enhancements or modifications. In no event shall the Institute for Systems Biology and the Whitehead Institute be liable to any party for direct, indirect, special, incidental or consequential damages, including lost profits, arising out of the use of this software and its documentation, even if the Institute for Systems Biology and the Whitehead Institute have been advised of the possibility of such damage. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. 
*/ package org.cytoscape.view.model.internal; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import org.cytoscape.event.CyEventHelper; import org.cytoscape.model.CyNetwork; import org.cytoscape.model.events.NetworkAboutToBeDestroyedEvent; import org.cytoscape.model.events.NetworkAboutToBeDestroyedListener; import org.cytoscape.view.model.CyNetworkView; import org.cytoscape.view.model.CyNetworkViewManager; import org.cytoscape.view.model.events.NetworkViewAboutToBeDestroyedEvent; import org.cytoscape.view.model.events.NetworkViewAddedEvent; import org.cytoscape.view.model.events.NetworkViewDestroyedEvent; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * An implementation of CyNetworkViewManager. */ public class CyNetworkViewManagerImpl implements CyNetworkViewManager, NetworkAboutToBeDestroyedListener { private static final Logger logger = LoggerFactory.getLogger(CyNetworkViewManagerImpl.class); private final Map<CyNetwork, CyNetworkView> networkViewMap; private final CyEventHelper cyEventHelper; /** * * @param cyEventHelper */ public CyNetworkViewManagerImpl(final CyEventHelper cyEventHelper) { networkViewMap = new HashMap<CyNetwork, CyNetworkView>(); this.cyEventHelper = cyEventHelper; } @Override public synchronized void reset() { networkViewMap.clear(); } @Override public synchronized void handleEvent(final NetworkAboutToBeDestroyedEvent event) { final CyNetwork network = event.getNetwork(); if (viewExists(network)) destroyNetworkView(networkViewMap.get(network)); } @Override public synchronized Set<CyNetworkView> getNetworkViewSet() { return new HashSet<CyNetworkView>(networkViewMap.values()); } @Override public synchronized CyNetworkView getNetworkView(CyNetwork network) { return networkViewMap.get(network); } @Override public synchronized boolean viewExists(CyNetwork network) { return networkViewMap.containsKey(network); } @Override public void destroyNetworkView(CyNetworkView view) { if (view == 
null) throw new NullPointerException("view is null"); - final Long viewID = view.getModel().getSUID(); + final CyNetwork network = view.getModel(); // do this outside of the lock to fail early - if (!networkViewMap.containsKey(viewID)) + if (!networkViewMap.containsKey(network)) throw new IllegalArgumentException("network view is not recognized by this NetworkManager"); // let everyone know! cyEventHelper.fireEvent(new NetworkViewAboutToBeDestroyedEvent(this, view)); synchronized (this) { // do this again within the lock to be safe - if (!networkViewMap.containsKey(viewID)) + if (!networkViewMap.containsKey(network)) throw new IllegalArgumentException("network view is not recognized by this NetworkManager"); - networkViewMap.remove(viewID); + networkViewMap.remove(network); } cyEventHelper.fireEvent(new NetworkViewDestroyedEvent(this)); + logger.debug("######### Network View deleted: " + view.getSUID()); view = null; - logger.debug("######### Network View deleted: " + viewID); } @Override public void addNetworkView(final CyNetworkView view) { if (view == null) throw new NullPointerException("CyNetworkView is null"); final CyNetwork network = view.getModel(); synchronized (this) { logger.debug("Adding new Network View Model: Model ID = " + network.getSUID()); networkViewMap.put(network, view); } logger.debug("Firing event: NetworkViewAddedEvent"); cyEventHelper.fireEvent(new NetworkViewAddedEvent(this, view)); logger.debug("Done event: NetworkViewAddedEvent"); } }
false
true
public void destroyNetworkView(CyNetworkView view) { if (view == null) throw new NullPointerException("view is null"); final Long viewID = view.getModel().getSUID(); // do this outside of the lock to fail early if (!networkViewMap.containsKey(viewID)) throw new IllegalArgumentException("network view is not recognized by this NetworkManager"); // let everyone know! cyEventHelper.fireEvent(new NetworkViewAboutToBeDestroyedEvent(this, view)); synchronized (this) { // do this again within the lock to be safe if (!networkViewMap.containsKey(viewID)) throw new IllegalArgumentException("network view is not recognized by this NetworkManager"); networkViewMap.remove(viewID); } cyEventHelper.fireEvent(new NetworkViewDestroyedEvent(this)); view = null; logger.debug("######### Network View deleted: " + viewID); }
public void destroyNetworkView(CyNetworkView view) { if (view == null) throw new NullPointerException("view is null"); final CyNetwork network = view.getModel(); // do this outside of the lock to fail early if (!networkViewMap.containsKey(network)) throw new IllegalArgumentException("network view is not recognized by this NetworkManager"); // let everyone know! cyEventHelper.fireEvent(new NetworkViewAboutToBeDestroyedEvent(this, view)); synchronized (this) { // do this again within the lock to be safe if (!networkViewMap.containsKey(network)) throw new IllegalArgumentException("network view is not recognized by this NetworkManager"); networkViewMap.remove(network); } cyEventHelper.fireEvent(new NetworkViewDestroyedEvent(this)); logger.debug("######### Network View deleted: " + view.getSUID()); view = null; }
diff --git a/Login/src/de/mms/db/DBManager.java b/Login/src/de/mms/db/DBManager.java index 0d79408..9505fe7 100644 --- a/Login/src/de/mms/db/DBManager.java +++ b/Login/src/de/mms/db/DBManager.java @@ -1,123 +1,123 @@ package de.mms.db; import java.sql.Connection; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import de.mms.data.Subject; public class DBManager { public static final boolean LOCAL = true; protected static String URL = "jdbc:mysql://localhost/krm_db?"; protected static String USERDATA = "user=krm_user&password=wlc93Qx6aoJ4v"; protected static final String DRIVER = "com.mysql.jdbc.Driver"; static { try { // load driver Class.forName(DRIVER); } catch (ClassNotFoundException e) { e.printStackTrace(); System.exit(1); } } /** * Open the {@link Connection} to the MySQL database * * @return con * @throws SQLException */ protected static Connection openConnection() throws SQLException { if (LOCAL) { URL = "jdbc:mysql://localhost/krm_db?"; - USERDATA = "user=root&password=1913"; + USERDATA = "user=root&password="; } else { USERDATA = "user=krm_user&password=wlc93Qx6aoJ4v"; URL = "jdbc:mysql://bolonka-zwetna-von-der-kreuzbergquelle.de/krm_db?"; } Connection con = null; try { con = DriverManager.getConnection(URL + USERDATA); } catch (SQLException e) { System.out.println("No connection to database possible."); System.exit(2); } return con; } // end local databas connection /** * Close the {@link Connection} */ protected static void closeQuietly(Connection connection) { if (null == connection) return; try { connection.close(); } catch (SQLException e) { // ignored } } /** * Close the {@link Statement}s * * @param statment */ protected static void closeQuietly(Statement statment) { if (null == statment) return; try { statment.close(); } catch (SQLException e) { // ignored } } /** * Close the {@link ResultSet}s * * @param resultSet */ protected static void closeQuietly(ResultSet resultSet) { if 
(null == resultSet) return; try { resultSet.close(); } catch (SQLException e) { // ignored } } /** * for Pro ;) * * @param query * @throws SQLException */ public static void runQuery(String query) throws SQLException { Connection con = null; con = openConnection(); Statement stmt = con.createStatement(); con.setAutoCommit(false); stmt.executeUpdate(query); try { con.commit(); } catch (SQLException exc) { con.rollback(); // bei Fehlschlag Rollback der Transaktion System.out .println("COMMIT fehlgeschlagen - Rollback durchgefuehrt"); } finally { closeQuietly(stmt); closeQuietly(con); // Abbau Verbindung zur Datenbank } } }
true
true
protected static Connection openConnection() throws SQLException { if (LOCAL) { URL = "jdbc:mysql://localhost/krm_db?"; USERDATA = "user=root&password=1913"; } else { USERDATA = "user=krm_user&password=wlc93Qx6aoJ4v"; URL = "jdbc:mysql://bolonka-zwetna-von-der-kreuzbergquelle.de/krm_db?"; } Connection con = null; try { con = DriverManager.getConnection(URL + USERDATA); } catch (SQLException e) { System.out.println("No connection to database possible."); System.exit(2); } return con; }
protected static Connection openConnection() throws SQLException { if (LOCAL) { URL = "jdbc:mysql://localhost/krm_db?"; USERDATA = "user=root&password="; } else { USERDATA = "user=krm_user&password=wlc93Qx6aoJ4v"; URL = "jdbc:mysql://bolonka-zwetna-von-der-kreuzbergquelle.de/krm_db?"; } Connection con = null; try { con = DriverManager.getConnection(URL + USERDATA); } catch (SQLException e) { System.out.println("No connection to database possible."); System.exit(2); } return con; }
diff --git a/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java b/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java index 27d275ae0..473672b46 100644 --- a/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java +++ b/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java @@ -1,1663 +1,1663 @@ /** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.apache.hadoop.hdfs.server.namenode; import java.io.BufferedInputStream; import java.io.DataInput; import java.io.DataInputStream; import java.io.DataOutput; import java.io.EOFException; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.LinkedList; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.permission.PermissionStatus; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.DatanodeID; import org.apache.hadoop.hdfs.protocol.FSConstants; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier; import org.apache.hadoop.hdfs.server.common.Storage; import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory; import org.apache.hadoop.hdfs.server.namenode.FSImage.NameNodeDirType; import org.apache.hadoop.hdfs.server.namenode.metrics.NameNodeInstrumentation; import org.apache.hadoop.io.ArrayWritable; import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.UTF8; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableFactories; import org.apache.hadoop.io.WritableFactory; import org.apache.hadoop.security.token.delegation.DelegationKey; /** * FSEditLog maintains a log of the namespace modifications. 
* */ public class FSEditLog { public static final Log LOG = LogFactory.getLog(FSEditLog.class); static final byte OP_INVALID = -1; private static final byte OP_ADD = 0; private static final byte OP_RENAME = 1; // rename private static final byte OP_DELETE = 2; // delete private static final byte OP_MKDIR = 3; // create directory private static final byte OP_SET_REPLICATION = 4; // set replication //the following two are used only for backward compatibility : @Deprecated private static final byte OP_DATANODE_ADD = 5; @Deprecated private static final byte OP_DATANODE_REMOVE = 6; private static final byte OP_SET_PERMISSIONS = 7; private static final byte OP_SET_OWNER = 8; private static final byte OP_CLOSE = 9; // close after write private static final byte OP_SET_GENSTAMP = 10; // store genstamp /* The following two are not used any more. Should be removed once * LAST_UPGRADABLE_LAYOUT_VERSION is -17 or newer. */ private static final byte OP_SET_NS_QUOTA = 11; // set namespace quota private static final byte OP_CLEAR_NS_QUOTA = 12; // clear namespace quota private static final byte OP_TIMES = 13; // sets mod & access time on a file private static final byte OP_SET_QUOTA = 14; // sets name and disk quotas. private static final byte OP_GET_DELEGATION_TOKEN = 18; //new delegation token private static final byte OP_RENEW_DELEGATION_TOKEN = 19; //renew delegation token private static final byte OP_CANCEL_DELEGATION_TOKEN = 20; //cancel delegation token private static final byte OP_UPDATE_MASTER_KEY = 21; //update master key private static int sizeFlushBuffer = 512*1024; /** Preallocation length in bytes for writing edit log. */ static final int MIN_PREALLOCATION_LENGTH = 1024 * 1024; /** The limit of the length in bytes for each edit log transaction. 
*/ private static int TRANSACTION_LENGTH_LIMIT = Integer.MAX_VALUE; private ArrayList<EditLogOutputStream> editStreams = null; private FSImage fsimage = null; // a monotonically increasing counter that represents transactionIds. private long txid = 0; // stores the last synced transactionId. private long synctxid = 0; // the time of printing the statistics to the log file. private long lastPrintTime; // is a sync currently running? private boolean isSyncRunning; // these are statistics counters. private long numTransactions; // number of transactions private long numTransactionsBatchedInSync; private long totalTimeTransactions; // total time for all transactions private NameNodeInstrumentation metrics; private static class TransactionId { public long txid; TransactionId(long value) { this.txid = value; } } // stores the most current transactionId of this thread. private static final ThreadLocal<TransactionId> myTransactionId = new ThreadLocal<TransactionId>() { protected synchronized TransactionId initialValue() { return new TransactionId(Long.MAX_VALUE); } }; /** * An implementation of the abstract class {@link EditLogOutputStream}, * which stores edits in a local file. 
*/ static class EditLogFileOutputStream extends EditLogOutputStream { /** Preallocation buffer, padded with OP_INVALID */ private static final ByteBuffer PREALLOCATION_BUFFER = ByteBuffer.allocateDirect(MIN_PREALLOCATION_LENGTH); static { PREALLOCATION_BUFFER.position(0).limit(MIN_PREALLOCATION_LENGTH); for(int i = 0; i < PREALLOCATION_BUFFER.capacity(); i++) { PREALLOCATION_BUFFER.put(OP_INVALID); } } private File file; private FileOutputStream fp; // file stream for storing edit logs private FileChannel fc; // channel of the file stream for sync private DataOutputBuffer bufCurrent; // current buffer for writing private DataOutputBuffer bufReady; // buffer ready for flushing EditLogFileOutputStream(File name) throws IOException { super(); file = name; bufCurrent = new DataOutputBuffer(sizeFlushBuffer); bufReady = new DataOutputBuffer(sizeFlushBuffer); RandomAccessFile rp = new RandomAccessFile(name, "rw"); fp = new FileOutputStream(rp.getFD()); // open for append fc = rp.getChannel(); fc.position(fc.size()); } @Override String getName() { return file.getPath(); } /** {@inheritDoc} */ @Override public void write(int b) throws IOException { bufCurrent.write(b); } /** {@inheritDoc} */ @Override void write(byte op, Writable ... writables) throws IOException { write(op); for(Writable w : writables) { w.write(bufCurrent); } } /** * Create empty edits logs file. */ @Override void create() throws IOException { fc.truncate(0); fc.position(0); bufCurrent.writeInt(FSConstants.LAYOUT_VERSION); setReadyToFlush(); flush(); } @Override public void close() throws IOException { LOG.info("closing edit log: position=" + fc.position() + ", editlog=" + getName()); // close should have been called after all pending transactions // have been flushed & synced. 
int bufSize = bufCurrent.size(); if (bufSize != 0) { throw new IOException("FSEditStream has " + bufSize + " bytes still to be flushed and cannot " + "be closed."); } bufCurrent.close(); bufReady.close(); // remove any preallocated padding bytes from the transaction log. fc.truncate(fc.position()); fp.close(); bufCurrent = bufReady = null; LOG.info("close success: truncate to " + file.length() + ", editlog=" + getName()); } /** * All data that has been written to the stream so far will be flushed. * New data can be still written to the stream while flushing is performed. */ @Override void setReadyToFlush() throws IOException { assert bufReady.size() == 0 : "previous data is not flushed yet"; DataOutputBuffer tmp = bufReady; bufReady = bufCurrent; bufCurrent = tmp; } /** * Flush ready buffer to persistent store. * currentBuffer is not flushed as it accumulates new log records * while readyBuffer will be flushed and synced. */ @Override protected void flushAndSync() throws IOException { preallocate(); // preallocate file if necessary bufReady.writeTo(fp); // write data to file bufReady.reset(); // erase all data in the buffer fc.force(false); // metadata updates not needed because of preallocation } /** * Return the size of the current edit log including buffered data. 
*/ @Override long length() throws IOException { // file size + size of both buffers return fc.size() + bufReady.size() + bufCurrent.size(); } // allocate a big chunk of data private void preallocate() throws IOException { long size = fc.size(); int bufSize = bufReady.getLength(); long need = bufSize - (size - fc.position()); if (need <= 0) { return; } long oldSize = size; long total = 0; long fillCapacity = PREALLOCATION_BUFFER.capacity(); PREALLOCATION_BUFFER.position(0); while (need > 0) { do { size += fc.write(PREALLOCATION_BUFFER, size); } while (PREALLOCATION_BUFFER.remaining() > 0); need -= fillCapacity; total += fillCapacity; } if(FSNamesystem.LOG.isDebugEnabled()) { FSNamesystem.LOG.debug("Preallocated " + total + " bytes at the end of " + "the edit log (offset " + oldSize + ")"); } } /** * Returns the file associated with this stream */ File getFile() { return file; } } static class EditLogFileInputStream extends EditLogInputStream { private File file; private FileInputStream fStream; EditLogFileInputStream(File name) throws IOException { file = name; fStream = new FileInputStream(name); } @Override String getName() { return file.getPath(); } @Override public int available() throws IOException { return fStream.available(); } @Override public int read() throws IOException { return fStream.read(); } @Override public int read(byte[] b, int off, int len) throws IOException { return fStream.read(b, off, len); } @Override public void close() throws IOException { fStream.close(); } @Override long length() throws IOException { // file size + size of both buffers return file.length(); } } FSEditLog(FSImage image) { fsimage = image; isSyncRunning = false; metrics = NameNode.getNameNodeMetrics(); lastPrintTime = FSNamesystem.now(); } private File getEditFile(StorageDirectory sd) { return fsimage.getEditFile(sd); } private File getEditNewFile(StorageDirectory sd) { return fsimage.getEditNewFile(sd); } private int getNumStorageDirs() { int numStorageDirs = 0; 
Iterator<StorageDirectory> it = fsimage.dirIterator(NameNodeDirType.EDITS); while (it.hasNext()) { numStorageDirs++; it.next(); } return numStorageDirs; } synchronized int getNumEditStreams() { return editStreams == null ? 0 : editStreams.size(); } boolean isOpen() { return getNumEditStreams() > 0; } /** * Create empty edit log files. * Initialize the output stream for logging. * * @throws IOException */ public synchronized void open() throws IOException { numTransactions = totalTimeTransactions = numTransactionsBatchedInSync = 0; if (editStreams == null) { editStreams = new ArrayList<EditLogOutputStream>(); } Iterator<StorageDirectory> it = fsimage.dirIterator(NameNodeDirType.EDITS); while (it.hasNext()) { StorageDirectory sd = it.next(); File eFile = getEditFile(sd); try { EditLogOutputStream eStream = new EditLogFileOutputStream(eFile); editStreams.add(eStream); } catch (IOException ioe) { fsimage.updateRemovedDirs(sd, ioe); it.remove(); } } exitIfNoStreams(); } public synchronized void createEditLogFile(File name) throws IOException { EditLogOutputStream eStream = new EditLogFileOutputStream(name); eStream.create(); eStream.close(); } /** * Shutdown the file store. */ public synchronized void close() throws IOException { while (isSyncRunning) { try { wait(1000); } catch (InterruptedException ie) { } } if (editStreams == null) { return; } printStatistics(true); numTransactions = totalTimeTransactions = numTransactionsBatchedInSync = 0; for (int idx = 0; idx < editStreams.size(); idx++) { EditLogOutputStream eStream = editStreams.get(idx); try { eStream.setReadyToFlush(); eStream.flush(); eStream.close(); } catch (IOException ioe) { removeEditsAndStorageDir(idx); idx--; } } editStreams.clear(); } void fatalExit(String msg) { LOG.fatal(msg, new Exception(msg)); Runtime.getRuntime().exit(-1); } /** * Exit the NN process if the edit streams have not yet been * initialized, eg we failed while opening. 
*/ private void exitIfStreamsNotSet() { if (editStreams == null) { fatalExit("Edit streams not yet initialized"); } } /** * Exit the NN process if there are no edit streams to log to. */ void exitIfNoStreams() { if (editStreams == null || editStreams.isEmpty()) { fatalExit("No edit streams are accessible"); } } /** * @return the storage directory for the given edit stream. */ private File getStorageDirForStream(int idx) { File editsFile = ((EditLogFileOutputStream)editStreams.get(idx)).getFile(); // Namedir is the parent of current which is the parent of edits return editsFile.getParentFile().getParentFile(); } /** * Remove the given edits stream and its containing storage dir. */ synchronized void removeEditsAndStorageDir(int idx) { exitIfStreamsNotSet(); assert idx < getNumStorageDirs(); assert getNumStorageDirs() == editStreams.size(); File dir = getStorageDirForStream(idx); editStreams.remove(idx); exitIfNoStreams(); fsimage.removeStorageDir(dir); } /** * Remove all edits streams for the given storage directory. */ synchronized void removeEditsForStorageDir(StorageDirectory sd) { exitIfStreamsNotSet(); if (!sd.getStorageDirType().isOfType(NameNodeDirType.EDITS)) { return; } for (int idx = 0; idx < editStreams.size(); idx++) { File parentDir = getStorageDirForStream(idx); if (parentDir.getAbsolutePath().equals( sd.getRoot().getAbsolutePath())) { editStreams.remove(idx); idx--; } } exitIfNoStreams(); } /** * Remove each of the given edits streams and their corresponding * storage directories. 
*/ private void removeEditsStreamsAndStorageDirs( ArrayList<EditLogOutputStream> errorStreams) { if (errorStreams == null) { return; } for (EditLogOutputStream errorStream : errorStreams) { int idx = editStreams.indexOf(errorStream); if (-1 == idx) { fatalExit("Unable to find edits stream with IO error"); } removeEditsAndStorageDir(idx); } fsimage.incrementCheckpointTime(); } /** * check if ANY edits.new log exists */ boolean existsNew() throws IOException { Iterator<StorageDirectory> it = fsimage.dirIterator(NameNodeDirType.EDITS); while (it.hasNext()) { if (getEditNewFile(it.next()).exists()) { return true; } } return false; } /** * The end of a edit log should contain padding of either 0x00 or OP_INVALID. * If it contains other bytes, the edit log may be corrupted. * It is important to perform the check; otherwise, a stray OP_INVALID byte * could be misinterpreted as an end-of-log, and lead to silent data loss. */ private static void checkEndOfLog(final EditLogInputStream edits, final DataInputStream in, final PositionTrackingInputStream pin, final int tolerationLength) throws IOException { if (tolerationLength < 0) { //the checking is disabled. 
return; } LOG.info("Start checking end of edit log (" + edits.getName() + ") ..."); in.mark(0); //clear the mark final long readLength = pin.getPos(); long firstPadPos = -1; //first padded byte position byte pad = 0; //padded value; must be either 0 or OP_INVALID final byte[] bytes = new byte[4096]; for(int n; (n = in.read(bytes)) != -1; ) { for(int i = 0; i < n; i++) { final byte b = bytes[i]; if (firstPadPos != -1 && b != pad) { //the byte is different from the first padded byte, reset firstPos firstPadPos = -1; if (LOG.isDebugEnabled()) { LOG.debug(String.format("reset: bytes[%d]=0x%X, pad=0x%02X", i, b, pad)); } } if (firstPadPos == -1) { if (b == 0 || b == OP_INVALID) { //found the first padded byte firstPadPos = pin.getPos() - n + i; pad = b; if (LOG.isDebugEnabled()) { LOG.debug(String.format("found: bytes[%d]=0x%02X=pad, firstPadPos=%d", i, b, firstPadPos)); } } } } } final long corruptionLength; final long padLength; if (firstPadPos == -1) { //padding not found corruptionLength = edits.length() - readLength; padLength = 0; } else { corruptionLength = firstPadPos - readLength; padLength = edits.length() - firstPadPos; } LOG.info("Checked the bytes after the end of edit log (" + edits.getName() + "):"); LOG.info(" Padding position = " + firstPadPos + " (-1 means padding not found)"); LOG.info(" Edit log length = " + edits.length()); LOG.info(" Read length = " + readLength); LOG.info(" Corruption length = " + corruptionLength); LOG.info(" Toleration length = " + tolerationLength + " (= " + DFSConfigKeys.DFS_NAMENODE_EDITS_TOLERATION_LENGTH_KEY + ")"); LOG.info(String.format( "Summary: |---------- Read=%d ----------|-- Corrupt=%d --|-- Pad=%d --|", readLength, corruptionLength, padLength)); if (pin.getPos() != edits.length()) { throw new IOException("Edit log length mismatched: edits.length() = " + edits.length() + " != input steam position = " + pin.getPos()); } if (corruptionLength > 0) { final String err = "Edit log corruption detected: corruption length = 
" + corruptionLength; if (corruptionLength <= tolerationLength) { LOG.warn(err + " <= toleration length = " + tolerationLength + "; the corruption is tolerable."); } else { throw new IOException(err + " > toleration length = " + tolerationLength + "; the corruption is intolerable."); } } return; } /** * Load an edit log, and apply the changes to the in-memory structure * This is where we apply edits that we've been writing to disk all * along. */ static int loadFSEdits(EditLogInputStream edits, int tolerationLength, MetaRecoveryContext recovery) throws IOException { FSNamesystem fsNamesys = FSNamesystem.getFSNamesystem(); FSDirectory fsDir = fsNamesys.dir; int numEdits = 0; int logVersion = 0; String clientName = null; String clientMachine = null; String path = null; int numOpAdd = 0, numOpClose = 0, numOpDelete = 0, numOpRename = 0, numOpSetRepl = 0, numOpMkDir = 0, numOpSetPerm = 0, numOpSetOwner = 0, numOpSetGenStamp = 0, numOpTimes = 0, numOpGetDelegationToken = 0, numOpRenewDelegationToken = 0, numOpCancelDelegationToken = 0, numOpUpdateMasterKey = 0, numOpOther = 0; long highestGenStamp = -1; long startTime = FSNamesystem.now(); // Keep track of the file offsets of the last several opcodes. // This is handy when manually recovering corrupted edits files. PositionTrackingInputStream tracker = new PositionTrackingInputStream(new BufferedInputStream(edits)); long recentOpcodeOffsets[] = new long[4]; Arrays.fill(recentOpcodeOffsets, -1); - final boolean isToterationEnabled = tolerationLength >= 0; + final boolean isTolerationEnabled = tolerationLength >= 0; DataInputStream in = new DataInputStream(tracker); Byte opcode = null; try { // Read log file version. Could be missing. 
in.mark(4); // If edits log is greater than 2G, available method will return negative // numbers, so we avoid having to call available boolean available = true; try { logVersion = in.readByte(); } catch (EOFException e) { available = false; } if (available) { in.reset(); logVersion = in.readInt(); if (logVersion < FSConstants.LAYOUT_VERSION) // future version throw new IOException( "Unexpected version of the file system log file: " + logVersion + ". Current version = " + FSConstants.LAYOUT_VERSION + "."); } assert logVersion <= Storage.LAST_UPGRADABLE_LAYOUT_VERSION : "Unsupported version " + logVersion; while (true) { - if (isToterationEnabled) { + if (isTolerationEnabled) { //mark position could be reset in case of exceptions in.mark(TRANSACTION_LENGTH_LIMIT); } long timestamp = 0; long mtime = 0; long atime = 0; long blockSize = 0; opcode = null; try { opcode = in.readByte(); if (opcode == OP_INVALID) { LOG.info("Invalid opcode, reached end of edit log " + "Number of transactions found: " + numEdits + ". " + "Bytes read: " + tracker.getPos()); break; // no more transactions } } catch (EOFException e) { LOG.info("Reading " + edits.getName() + ": " + e); break; // no more transactions } recentOpcodeOffsets[numEdits % recentOpcodeOffsets.length] = tracker.getPos(); numEdits++; switch (opcode) { case OP_ADD: case OP_CLOSE: { // versions > 0 support per file replication // get name and replication int length = in.readInt(); if (-7 == logVersion && length != 3|| -17 < logVersion && logVersion < -7 && length != 4 || logVersion <= -17 && length != 5) { throw new IOException("Incorrect data format." + " logVersion is " + logVersion + " but writables.length is " + length + ". 
"); } path = FSImage.readString(in); short replication = adjustReplication(readShort(in)); mtime = readLong(in); if (logVersion <= -17) { atime = readLong(in); } if (logVersion < -7) { blockSize = readLong(in); } // get blocks Block blocks[] = null; if (logVersion <= -14) { blocks = readBlocks(in); } else { BlockTwo oldblk = new BlockTwo(); int num = in.readInt(); blocks = new Block[num]; for (int i = 0; i < num; i++) { oldblk.readFields(in); blocks[i] = new Block(oldblk.blkid, oldblk.len, Block.GRANDFATHER_GENERATION_STAMP); } } // Older versions of HDFS does not store the block size in inode. // If the file has more than one block, use the size of the // first block as the blocksize. Otherwise use the default // block size. if (-8 <= logVersion && blockSize == 0) { if (blocks.length > 1) { blockSize = blocks[0].getNumBytes(); } else { long first = ((blocks.length == 1)? blocks[0].getNumBytes(): 0); blockSize = Math.max(fsNamesys.getDefaultBlockSize(), first); } } PermissionStatus permissions = fsNamesys.getUpgradePermission(); if (logVersion <= -11) { permissions = PermissionStatus.read(in); } // clientname, clientMachine and block locations of last block. if (opcode == OP_ADD && logVersion <= -12) { clientName = FSImage.readString(in); clientMachine = FSImage.readString(in); if (-13 <= logVersion) { readDatanodeDescriptorArray(in); } } else { clientName = ""; clientMachine = ""; } // The open lease transaction re-creates a file if necessary. // Delete the file if it already exists. if (LOG.isDebugEnabled()) { LOG.debug(opcode + ": " + path + " numblocks : " + blocks.length + " clientHolder " + clientName + " clientMachine " + clientMachine); } fsDir.unprotectedDelete(path, mtime); // add to the file tree INodeFile node = (INodeFile)fsDir.unprotectedAddFile( path, permissions, blocks, replication, mtime, atime, blockSize); if (opcode == OP_ADD) { numOpAdd++; // // Replace current node with a INodeUnderConstruction. // Recreate in-memory lease record. 
// INodeFileUnderConstruction cons = new INodeFileUnderConstruction( node.getLocalNameBytes(), node.getReplication(), node.getModificationTime(), node.getPreferredBlockSize(), node.getBlocks(), node.getPermissionStatus(), clientName, clientMachine, null); fsDir.replaceNode(path, node, cons); fsNamesys.leaseManager.addLease(cons.clientName, path); } break; } case OP_SET_REPLICATION: { numOpSetRepl++; path = FSImage.readString(in); short replication = adjustReplication(readShort(in)); fsDir.unprotectedSetReplication(path, replication, null); break; } case OP_RENAME: { numOpRename++; int length = in.readInt(); if (length != 3) { throw new IOException("Incorrect data format. " + "Mkdir operation."); } String s = FSImage.readString(in); String d = FSImage.readString(in); timestamp = readLong(in); HdfsFileStatus dinfo = fsDir.getFileInfo(d); fsDir.unprotectedRenameTo(s, d, timestamp); fsNamesys.changeLease(s, d, dinfo); break; } case OP_DELETE: { numOpDelete++; int length = in.readInt(); if (length != 2) { throw new IOException("Incorrect data format. " + "delete operation."); } path = FSImage.readString(in); timestamp = readLong(in); fsDir.unprotectedDelete(path, timestamp); break; } case OP_MKDIR: { numOpMkDir++; PermissionStatus permissions = fsNamesys.getUpgradePermission(); int length = in.readInt(); if (-17 < logVersion && length != 2 || logVersion <= -17 && length != 3) { throw new IOException("Incorrect data format. " + "Mkdir operation."); } path = FSImage.readString(in); timestamp = readLong(in); // The disk format stores atimes for directories as well. // However, currently this is not being updated/used because of // performance reasons. 
if (logVersion <= -17) { atime = readLong(in); } if (logVersion <= -11) { permissions = PermissionStatus.read(in); } fsDir.unprotectedMkdir(path, permissions, timestamp); break; } case OP_SET_GENSTAMP: { numOpSetGenStamp++; long lw = in.readLong(); if ((highestGenStamp != -1) && (highestGenStamp + 1 != lw)) { throw new IOException("OP_SET_GENSTAMP tried to set a genstamp of " + lw + " but the previous highest genstamp was " + highestGenStamp); } highestGenStamp = lw; fsDir.namesystem.setGenerationStamp(lw); break; } case OP_DATANODE_ADD: { numOpOther++; FSImage.DatanodeImage nodeimage = new FSImage.DatanodeImage(); nodeimage.readFields(in); //Datnodes are not persistent any more. break; } case OP_DATANODE_REMOVE: { numOpOther++; DatanodeID nodeID = new DatanodeID(); nodeID.readFields(in); //Datanodes are not persistent any more. break; } case OP_SET_PERMISSIONS: { numOpSetPerm++; if (logVersion > -11) throw new IOException("Unexpected opcode " + opcode + " for version " + logVersion); fsDir.unprotectedSetPermission( FSImage.readString(in), FsPermission.read(in)); break; } case OP_SET_OWNER: { numOpSetOwner++; if (logVersion > -11) throw new IOException("Unexpected opcode " + opcode + " for version " + logVersion); fsDir.unprotectedSetOwner(FSImage.readString(in), FSImage.readString_EmptyAsNull(in), FSImage.readString_EmptyAsNull(in)); break; } case OP_SET_NS_QUOTA: { if (logVersion > -16) { throw new IOException("Unexpected opcode " + opcode + " for version " + logVersion); } fsDir.unprotectedSetQuota(FSImage.readString(in), readLongWritable(in), FSConstants.QUOTA_DONT_SET); break; } case OP_CLEAR_NS_QUOTA: { if (logVersion > -16) { throw new IOException("Unexpected opcode " + opcode + " for version " + logVersion); } fsDir.unprotectedSetQuota(FSImage.readString(in), FSConstants.QUOTA_RESET, FSConstants.QUOTA_DONT_SET); break; } case OP_SET_QUOTA: fsDir.unprotectedSetQuota(FSImage.readString(in), readLongWritable(in), readLongWritable(in)); break; case OP_TIMES: { 
numOpTimes++; int length = in.readInt(); if (length != 3) { throw new IOException("Incorrect data format. " + "times operation."); } path = FSImage.readString(in); mtime = readLong(in); atime = readLong(in); fsDir.unprotectedSetTimes(path, mtime, atime, true); break; } case OP_GET_DELEGATION_TOKEN: { if (logVersion > -19) { throw new IOException("Unexpected opcode " + opcode + " for version " + logVersion); } numOpGetDelegationToken++; DelegationTokenIdentifier delegationTokenId = new DelegationTokenIdentifier(); delegationTokenId.readFields(in); long expiryTime = readLong(in); fsNamesys.getDelegationTokenSecretManager() .addPersistedDelegationToken(delegationTokenId, expiryTime); break; } case OP_RENEW_DELEGATION_TOKEN: { if (logVersion > -19) { throw new IOException("Unexpected opcode " + opcode + " for version " + logVersion); } numOpRenewDelegationToken++; DelegationTokenIdentifier delegationTokenId = new DelegationTokenIdentifier(); delegationTokenId.readFields(in); long expiryTime = readLong(in); fsNamesys.getDelegationTokenSecretManager() .updatePersistedTokenRenewal(delegationTokenId, expiryTime); break; } case OP_CANCEL_DELEGATION_TOKEN: { if (logVersion > -19) { throw new IOException("Unexpected opcode " + opcode + " for version " + logVersion); } numOpCancelDelegationToken++; DelegationTokenIdentifier delegationTokenId = new DelegationTokenIdentifier(); delegationTokenId.readFields(in); fsNamesys.getDelegationTokenSecretManager() .updatePersistedTokenCancellation(delegationTokenId); break; } case OP_UPDATE_MASTER_KEY: { if (logVersion > -19) { throw new IOException("Unexpected opcode " + opcode + " for version " + logVersion); } numOpUpdateMasterKey++; DelegationKey delegationKey = new DelegationKey(); delegationKey.readFields(in); fsNamesys.getDelegationTokenSecretManager().updatePersistedMasterKey( delegationKey); break; } default: { throw new IOException("Never seen opcode " + opcode); } } } } catch (Throwable t) { String msg = "Failed to parse edit 
log (" + edits.getName() + ") at position " + tracker.getPos() + ", edit log length is " + edits.length() + ", opcode=" + opcode - + ", isToterationEnabled=" + isToterationEnabled; + + ", isTolerationEnabled=" + isTolerationEnabled; // Catch Throwable because in the case of a truly corrupt edits log, any // sort of error might be thrown (NumberFormat, NullPointer, EOF, etc.) if (Storage.is203LayoutVersion(logVersion) && logVersion != FSConstants.LAYOUT_VERSION) { // Failed to load 0.20.203 version edits during upgrade. This version has // conflicting opcodes with the later releases. The editlog must be // emptied by restarting the namenode, before proceeding with the upgrade. msg += ": During upgrade, failed to load the editlog version " + logVersion + " from release 0.20.203. Please go back to the old " + " release and restart the namenode. This empties the editlog " + " and saves the namespace. Resume the upgrade after this step."; throw new IOException(msg, t); } if (recentOpcodeOffsets[0] != -1) { Arrays.sort(recentOpcodeOffsets); StringBuilder sb = new StringBuilder(", Recent opcode offsets=[") .append(recentOpcodeOffsets[0]); for (int i = 1; i < recentOpcodeOffsets.length; i++) { if (recentOpcodeOffsets[i] != -1) { sb.append(' ').append(recentOpcodeOffsets[i]); } } msg += sb.append("]"); } LOG.warn(msg, t); - if (isToterationEnabled) { + if (isTolerationEnabled) { in.reset(); //reset to the beginning position of this transaction } else { //edit log toleration feature is disabled MetaRecoveryContext.editLogLoaderPrompt(msg, recovery); } } finally { try { checkEndOfLog(edits, in, tracker, tolerationLength); } finally { in.close(); } } LOG.info("Edits file " + edits.getName() + " of size " + edits.length() + " edits # " + numEdits + " loaded in " + (FSNamesystem.now()-startTime)/1000 + " seconds."); if (LOG.isDebugEnabled()) { LOG.debug("numOpAdd = " + numOpAdd + " numOpClose = " + numOpClose + " numOpDelete = " + numOpDelete + " numOpRename = " + numOpRename + 
" numOpSetRepl = " + numOpSetRepl + " numOpMkDir = " + numOpMkDir + " numOpSetPerm = " + numOpSetPerm + " numOpSetOwner = " + numOpSetOwner + " numOpSetGenStamp = " + numOpSetGenStamp + " numOpTimes = " + numOpTimes + " numOpGetDelegationToken = " + numOpGetDelegationToken + " numOpRenewDelegationToken = " + numOpRenewDelegationToken + " numOpCancelDelegationToken = " + numOpCancelDelegationToken + " numOpUpdateMasterKey = " + numOpUpdateMasterKey + " numOpOther = " + numOpOther); } if (logVersion != FSConstants.LAYOUT_VERSION) // other version numEdits++; // save this image asap return numEdits; } // a place holder for reading a long private static final LongWritable longWritable = new LongWritable(); /** Read an integer from an input stream */ private static long readLongWritable(DataInputStream in) throws IOException { synchronized (longWritable) { longWritable.readFields(in); return longWritable.get(); } } static short adjustReplication(short replication) { FSNamesystem fsNamesys = FSNamesystem.getFSNamesystem(); short minReplication = fsNamesys.getMinReplication(); if (replication<minReplication) { replication = minReplication; } short maxReplication = fsNamesys.getMaxReplication(); if (replication>maxReplication) { replication = maxReplication; } return replication; } /** * Write an operation to the edit log. Do not sync to persistent * store yet. */ synchronized void logEdit(byte op, Writable ... 
writables) { if (getNumEditStreams() < 1) { throw new AssertionError("No edit streams to log to"); } long start = FSNamesystem.now(); for (int idx = 0; idx < editStreams.size(); idx++) { EditLogOutputStream eStream = editStreams.get(idx); try { eStream.write(op, writables); } catch (IOException ioe) { removeEditsAndStorageDir(idx); idx--; } } exitIfNoStreams(); // get a new transactionId txid++; // // record the transactionId when new data was written to the edits log // TransactionId id = myTransactionId.get(); id.txid = txid; // update statistics long end = FSNamesystem.now(); numTransactions++; totalTimeTransactions += (end-start); if (metrics != null) // Metrics is non-null only when used inside name node metrics.addTransaction(end-start); } // // Sync all modifications done by this thread. // public void logSync() throws IOException { ArrayList<EditLogOutputStream> errorStreams = null; long syncStart = 0; // Fetch the transactionId of this thread. long mytxid = myTransactionId.get().txid; ArrayList<EditLogOutputStream> streams = new ArrayList<EditLogOutputStream>(); boolean sync = false; try { synchronized (this) { printStatistics(false); // if somebody is already syncing, then wait while (mytxid > synctxid && isSyncRunning) { try { wait(1000); } catch (InterruptedException ie) { } } // // If this transaction was already flushed, then nothing to do // if (mytxid <= synctxid) { numTransactionsBatchedInSync++; if (metrics != null) // Metrics is non-null only when used inside name node metrics.incrTransactionsBatchedInSync(); return; } // now, this thread will do the sync syncStart = txid; isSyncRunning = true; sync = true; // swap buffers exitIfNoStreams(); for(EditLogOutputStream eStream : editStreams) { try { eStream.setReadyToFlush(); streams.add(eStream); } catch (IOException ie) { LOG.error("Unable to get ready to flush.", ie); // // remember the streams that encountered an error. 
// if (errorStreams == null) { errorStreams = new ArrayList<EditLogOutputStream>(1); } errorStreams.add(eStream); } } } // do the sync long start = FSNamesystem.now(); for (EditLogOutputStream eStream : streams) { try { eStream.flush(); } catch (IOException ie) { LOG.error("Unable to sync edit log.", ie); // // remember the streams that encountered an error. // if (errorStreams == null) { errorStreams = new ArrayList<EditLogOutputStream>(1); } errorStreams.add(eStream); } } long elapsed = FSNamesystem.now() - start; removeEditsStreamsAndStorageDirs(errorStreams); exitIfNoStreams(); if (metrics != null) // Metrics is non-null only when used inside name node metrics.addSync(elapsed); } finally { synchronized (this) { if(sync) { synctxid = syncStart; isSyncRunning = false; } this.notifyAll(); } } } // // print statistics every 1 minute. // private void printStatistics(boolean force) { long now = FSNamesystem.now(); if (lastPrintTime + 60000 > now && !force) { return; } if (editStreams == null || editStreams.size()==0) { return; } lastPrintTime = now; StringBuilder buf = new StringBuilder(); buf.append("Number of transactions: "); buf.append(numTransactions); buf.append(" Total time for transactions(ms): "); buf.append(totalTimeTransactions); buf.append("Number of transactions batched in Syncs: "); buf.append(numTransactionsBatchedInSync); buf.append(" Number of syncs: "); buf.append(editStreams.get(0).getNumSync()); buf.append(" SyncTimes(ms): "); int numEditStreams = editStreams.size(); for (int idx = 0; idx < numEditStreams; idx++) { EditLogOutputStream eStream = editStreams.get(idx); buf.append(eStream.getTotalSyncTime()); buf.append(" "); } LOG.info(buf); } /** * Add open lease record to edit log. * Records the block locations of the last block. 
*/ public void logOpenFile(String path, INodeFileUnderConstruction newNode) throws IOException { UTF8 nameReplicationPair[] = new UTF8[] { new UTF8(path), FSEditLog.toLogReplication(newNode.getReplication()), FSEditLog.toLogLong(newNode.getModificationTime()), FSEditLog.toLogLong(newNode.getAccessTime()), FSEditLog.toLogLong(newNode.getPreferredBlockSize())}; logEdit(OP_ADD, new ArrayWritable(UTF8.class, nameReplicationPair), new ArrayWritable(Block.class, newNode.getBlocks()), newNode.getPermissionStatus(), new UTF8(newNode.getClientName()), new UTF8(newNode.getClientMachine())); } /** * Add close lease record to edit log. */ public void logCloseFile(String path, INodeFile newNode) { UTF8 nameReplicationPair[] = new UTF8[] { new UTF8(path), FSEditLog.toLogReplication(newNode.getReplication()), FSEditLog.toLogLong(newNode.getModificationTime()), FSEditLog.toLogLong(newNode.getAccessTime()), FSEditLog.toLogLong(newNode.getPreferredBlockSize())}; logEdit(OP_CLOSE, new ArrayWritable(UTF8.class, nameReplicationPair), new ArrayWritable(Block.class, newNode.getBlocks()), newNode.getPermissionStatus()); } /** * Add create directory record to edit log */ public void logMkDir(String path, INode newNode) { UTF8 info[] = new UTF8[] { new UTF8(path), FSEditLog.toLogLong(newNode.getModificationTime()), FSEditLog.toLogLong(newNode.getAccessTime()) }; logEdit(OP_MKDIR, new ArrayWritable(UTF8.class, info), newNode.getPermissionStatus()); } /** * Add rename record to edit log * TODO: use String parameters until just before writing to disk */ void logRename(String src, String dst, long timestamp) { UTF8 info[] = new UTF8[] { new UTF8(src), new UTF8(dst), FSEditLog.toLogLong(timestamp)}; logEdit(OP_RENAME, new ArrayWritable(UTF8.class, info)); } /** * Add set replication record to edit log */ void logSetReplication(String src, short replication) { logEdit(OP_SET_REPLICATION, new UTF8(src), FSEditLog.toLogReplication(replication)); } /** Add set namespace quota record to edit log * * 
@param src the string representation of the path to a directory * @param quota the directory size limit */ void logSetQuota(String src, long nsQuota, long dsQuota) { logEdit(OP_SET_QUOTA, new UTF8(src), new LongWritable(nsQuota), new LongWritable(dsQuota)); } /** Add set permissions record to edit log */ void logSetPermissions(String src, FsPermission permissions) { logEdit(OP_SET_PERMISSIONS, new UTF8(src), permissions); } /** Add set owner record to edit log */ void logSetOwner(String src, String username, String groupname) { UTF8 u = new UTF8(username == null? "": username); UTF8 g = new UTF8(groupname == null? "": groupname); logEdit(OP_SET_OWNER, new UTF8(src), u, g); } /** * Add delete file record to edit log */ void logDelete(String src, long timestamp) { UTF8 info[] = new UTF8[] { new UTF8(src), FSEditLog.toLogLong(timestamp)}; logEdit(OP_DELETE, new ArrayWritable(UTF8.class, info)); } /** * Add generation stamp record to edit log */ void logGenerationStamp(long genstamp) { logEdit(OP_SET_GENSTAMP, new LongWritable(genstamp)); } /** * Add access time record to edit log */ void logTimes(String src, long mtime, long atime) { UTF8 info[] = new UTF8[] { new UTF8(src), FSEditLog.toLogLong(mtime), FSEditLog.toLogLong(atime)}; logEdit(OP_TIMES, new ArrayWritable(UTF8.class, info)); } /** * log delegation token to edit log * @param id DelegationTokenIdentifier * @param expiryTime of the token * @return */ void logGetDelegationToken(DelegationTokenIdentifier id, long expiryTime) { logEdit(OP_GET_DELEGATION_TOKEN, id, FSEditLog.toLogLong(expiryTime)); } void logRenewDelegationToken(DelegationTokenIdentifier id, long expiryTime) { logEdit(OP_RENEW_DELEGATION_TOKEN, id, FSEditLog.toLogLong(expiryTime)); } void logCancelDelegationToken(DelegationTokenIdentifier id) { logEdit(OP_CANCEL_DELEGATION_TOKEN, id); } void logUpdateMasterKey(DelegationKey key) { logEdit(OP_UPDATE_MASTER_KEY, key); } static private UTF8 toLogReplication(short replication) { return new 
UTF8(Short.toString(replication)); } static private UTF8 toLogLong(long timestamp) { return new UTF8(Long.toString(timestamp)); } /** * Return the size of the current EditLog */ synchronized long getEditLogSize() throws IOException { assert(getNumStorageDirs() == editStreams.size()); long size = 0; for (int idx = 0; idx < editStreams.size(); idx++) { long curSize = editStreams.get(idx).length(); assert (size == 0 || size == curSize) : "All streams must be the same"; size = curSize; } return size; } /** * Closes the current edit log and opens edits.new. * Returns the lastModified time of the edits log. */ synchronized void rollEditLog() throws IOException { // // If edits.new already exists in some directory, verify it // exists in all directories. // if (existsNew()) { Iterator<StorageDirectory> it = fsimage.dirIterator(NameNodeDirType.EDITS); StringBuilder b = new StringBuilder(); while (it.hasNext()) { File editsNew = getEditNewFile(it.next()); b.append("\n ").append(editsNew); if (!editsNew.exists()) { throw new IOException( "Inconsistent existence of edits.new " + editsNew); } } LOG.warn("Cannot roll edit log," + " edits.new files already exists in all healthy directories:" + b); return; } close(); // close existing edit log // After edit streams are closed, healthy edits files should be identical, // and same to fsimage files fsimage.restoreStorageDirs(); // // Open edits.new // Iterator<StorageDirectory> it = fsimage.dirIterator(NameNodeDirType.EDITS); LinkedList<StorageDirectory> toRemove = new LinkedList<StorageDirectory>(); while (it.hasNext()) { StorageDirectory sd = it.next(); try { EditLogFileOutputStream eStream = new EditLogFileOutputStream(getEditNewFile(sd)); eStream.create(); editStreams.add(eStream); } catch (IOException ioe) { LOG.error("error retrying to reopen storage directory '" + sd.getRoot().getAbsolutePath() + "'", ioe); toRemove.add(sd); it.remove(); } } // updateRemovedDirs will abort the NameNode if it removes the last // valid edit log 
directory. for (StorageDirectory sd : toRemove) { removeEditsForStorageDir(sd); fsimage.updateRemovedDirs(sd); } exitIfNoStreams(); } /** * Removes the old edit log and renamed edits.new as edits. * Reopens the edits file. */ synchronized void purgeEditLog() throws IOException { // // If edits.new does not exists, then return error. // if (!existsNew()) { throw new IOException("Attempt to purge edit log " + "but edits.new does not exist."); } close(); // // Delete edits and rename edits.new to edits. // Iterator<StorageDirectory> it = fsimage.dirIterator(NameNodeDirType.EDITS); while (it.hasNext()) { StorageDirectory sd = it.next(); if (!getEditNewFile(sd).renameTo(getEditFile(sd))) { // // renameTo() fails on Windows if the destination // file exists. // getEditFile(sd).delete(); if (!getEditNewFile(sd).renameTo(getEditFile(sd))) { sd.unlock(); removeEditsForStorageDir(sd); fsimage.updateRemovedDirs(sd); it.remove(); } } } // // Reopen all the edits logs. // open(); } /** * Return the name of the edit file */ synchronized File getFsEditName() throws IOException { StorageDirectory sd = null; for (Iterator<StorageDirectory> it = fsimage.dirIterator(NameNodeDirType.EDITS); it.hasNext();) sd = it.next(); return getEditFile(sd); } /** * Returns the timestamp of the edit log */ synchronized long getFsEditTime() { Iterator<StorageDirectory> it = fsimage.dirIterator(NameNodeDirType.EDITS); if(it.hasNext()) return getEditFile(it.next()).lastModified(); return 0; } // sets the initial capacity of the flush buffer. static void setBufferCapacity(int size) { sizeFlushBuffer = size; } /** * A class to read in blocks stored in the old format. The only two * fields in the block were blockid and length. 
*/ static class BlockTwo implements Writable { long blkid; long len; static { // register a ctor WritableFactories.setFactory (BlockTwo.class, new WritableFactory() { public Writable newInstance() { return new BlockTwo(); } }); } BlockTwo() { blkid = 0; len = 0; } ///////////////////////////////////// // Writable ///////////////////////////////////// public void write(DataOutput out) throws IOException { out.writeLong(blkid); out.writeLong(len); } public void readFields(DataInput in) throws IOException { this.blkid = in.readLong(); this.len = in.readLong(); } } /** This method is defined for compatibility reason. */ static private DatanodeDescriptor[] readDatanodeDescriptorArray(DataInput in ) throws IOException { DatanodeDescriptor[] locations = new DatanodeDescriptor[in.readInt()]; for (int i = 0; i < locations.length; i++) { locations[i] = new DatanodeDescriptor(); locations[i].readFieldsFromFSEditLog(in); } return locations; } static private short readShort(DataInputStream in) throws IOException { return Short.parseShort(FSImage.readString(in)); } static private long readLong(DataInputStream in) throws IOException { return Long.parseLong(FSImage.readString(in)); } static private Block[] readBlocks(DataInputStream in) throws IOException { int numBlocks = in.readInt(); Block[] blocks = new Block[numBlocks]; for (int i = 0; i < numBlocks; i++) { blocks[i] = new Block(); blocks[i].readFields(in); } return blocks; } /** * Stream wrapper that keeps track of the current file position. 
*/ private static class PositionTrackingInputStream extends FilterInputStream { private long curPos = 0; private long markPos = -1; public PositionTrackingInputStream(InputStream is) { super(is); } public int read() throws IOException { int ret = super.read(); if (ret != -1) curPos++; return ret; } public int read(byte[] data) throws IOException { int ret = super.read(data); if (ret > 0) curPos += ret; return ret; } public int read(byte[] data, int offset, int length) throws IOException { int ret = super.read(data, offset, length); if (ret > 0) curPos += ret; return ret; } public void mark(int limit) { super.mark(limit); markPos = curPos; } public void reset() throws IOException { if (markPos == -1) { throw new IOException("Not marked!"); } super.reset(); curPos = markPos; markPos = -1; } public long getPos() { return curPos; } } }
false
true
static int loadFSEdits(EditLogInputStream edits, int tolerationLength, MetaRecoveryContext recovery) throws IOException { FSNamesystem fsNamesys = FSNamesystem.getFSNamesystem(); FSDirectory fsDir = fsNamesys.dir; int numEdits = 0; int logVersion = 0; String clientName = null; String clientMachine = null; String path = null; int numOpAdd = 0, numOpClose = 0, numOpDelete = 0, numOpRename = 0, numOpSetRepl = 0, numOpMkDir = 0, numOpSetPerm = 0, numOpSetOwner = 0, numOpSetGenStamp = 0, numOpTimes = 0, numOpGetDelegationToken = 0, numOpRenewDelegationToken = 0, numOpCancelDelegationToken = 0, numOpUpdateMasterKey = 0, numOpOther = 0; long highestGenStamp = -1; long startTime = FSNamesystem.now(); // Keep track of the file offsets of the last several opcodes. // This is handy when manually recovering corrupted edits files. PositionTrackingInputStream tracker = new PositionTrackingInputStream(new BufferedInputStream(edits)); long recentOpcodeOffsets[] = new long[4]; Arrays.fill(recentOpcodeOffsets, -1); final boolean isToterationEnabled = tolerationLength >= 0; DataInputStream in = new DataInputStream(tracker); Byte opcode = null; try { // Read log file version. Could be missing. in.mark(4); // If edits log is greater than 2G, available method will return negative // numbers, so we avoid having to call available boolean available = true; try { logVersion = in.readByte(); } catch (EOFException e) { available = false; } if (available) { in.reset(); logVersion = in.readInt(); if (logVersion < FSConstants.LAYOUT_VERSION) // future version throw new IOException( "Unexpected version of the file system log file: " + logVersion + ". 
Current version = " + FSConstants.LAYOUT_VERSION + "."); } assert logVersion <= Storage.LAST_UPGRADABLE_LAYOUT_VERSION : "Unsupported version " + logVersion; while (true) { if (isToterationEnabled) { //mark position could be reset in case of exceptions in.mark(TRANSACTION_LENGTH_LIMIT); } long timestamp = 0; long mtime = 0; long atime = 0; long blockSize = 0; opcode = null; try { opcode = in.readByte(); if (opcode == OP_INVALID) { LOG.info("Invalid opcode, reached end of edit log " + "Number of transactions found: " + numEdits + ". " + "Bytes read: " + tracker.getPos()); break; // no more transactions } } catch (EOFException e) { LOG.info("Reading " + edits.getName() + ": " + e); break; // no more transactions } recentOpcodeOffsets[numEdits % recentOpcodeOffsets.length] = tracker.getPos(); numEdits++; switch (opcode) { case OP_ADD: case OP_CLOSE: { // versions > 0 support per file replication // get name and replication int length = in.readInt(); if (-7 == logVersion && length != 3|| -17 < logVersion && logVersion < -7 && length != 4 || logVersion <= -17 && length != 5) { throw new IOException("Incorrect data format." + " logVersion is " + logVersion + " but writables.length is " + length + ". "); } path = FSImage.readString(in); short replication = adjustReplication(readShort(in)); mtime = readLong(in); if (logVersion <= -17) { atime = readLong(in); } if (logVersion < -7) { blockSize = readLong(in); } // get blocks Block blocks[] = null; if (logVersion <= -14) { blocks = readBlocks(in); } else { BlockTwo oldblk = new BlockTwo(); int num = in.readInt(); blocks = new Block[num]; for (int i = 0; i < num; i++) { oldblk.readFields(in); blocks[i] = new Block(oldblk.blkid, oldblk.len, Block.GRANDFATHER_GENERATION_STAMP); } } // Older versions of HDFS does not store the block size in inode. // If the file has more than one block, use the size of the // first block as the blocksize. Otherwise use the default // block size. 
if (-8 <= logVersion && blockSize == 0) { if (blocks.length > 1) { blockSize = blocks[0].getNumBytes(); } else { long first = ((blocks.length == 1)? blocks[0].getNumBytes(): 0); blockSize = Math.max(fsNamesys.getDefaultBlockSize(), first); } } PermissionStatus permissions = fsNamesys.getUpgradePermission(); if (logVersion <= -11) { permissions = PermissionStatus.read(in); } // clientname, clientMachine and block locations of last block. if (opcode == OP_ADD && logVersion <= -12) { clientName = FSImage.readString(in); clientMachine = FSImage.readString(in); if (-13 <= logVersion) { readDatanodeDescriptorArray(in); } } else { clientName = ""; clientMachine = ""; } // The open lease transaction re-creates a file if necessary. // Delete the file if it already exists. if (LOG.isDebugEnabled()) { LOG.debug(opcode + ": " + path + " numblocks : " + blocks.length + " clientHolder " + clientName + " clientMachine " + clientMachine); } fsDir.unprotectedDelete(path, mtime); // add to the file tree INodeFile node = (INodeFile)fsDir.unprotectedAddFile( path, permissions, blocks, replication, mtime, atime, blockSize); if (opcode == OP_ADD) { numOpAdd++; // // Replace current node with a INodeUnderConstruction. // Recreate in-memory lease record. // INodeFileUnderConstruction cons = new INodeFileUnderConstruction( node.getLocalNameBytes(), node.getReplication(), node.getModificationTime(), node.getPreferredBlockSize(), node.getBlocks(), node.getPermissionStatus(), clientName, clientMachine, null); fsDir.replaceNode(path, node, cons); fsNamesys.leaseManager.addLease(cons.clientName, path); } break; } case OP_SET_REPLICATION: { numOpSetRepl++; path = FSImage.readString(in); short replication = adjustReplication(readShort(in)); fsDir.unprotectedSetReplication(path, replication, null); break; } case OP_RENAME: { numOpRename++; int length = in.readInt(); if (length != 3) { throw new IOException("Incorrect data format. 
" + "Mkdir operation."); } String s = FSImage.readString(in); String d = FSImage.readString(in); timestamp = readLong(in); HdfsFileStatus dinfo = fsDir.getFileInfo(d); fsDir.unprotectedRenameTo(s, d, timestamp); fsNamesys.changeLease(s, d, dinfo); break; } case OP_DELETE: { numOpDelete++; int length = in.readInt(); if (length != 2) { throw new IOException("Incorrect data format. " + "delete operation."); } path = FSImage.readString(in); timestamp = readLong(in); fsDir.unprotectedDelete(path, timestamp); break; } case OP_MKDIR: { numOpMkDir++; PermissionStatus permissions = fsNamesys.getUpgradePermission(); int length = in.readInt(); if (-17 < logVersion && length != 2 || logVersion <= -17 && length != 3) { throw new IOException("Incorrect data format. " + "Mkdir operation."); } path = FSImage.readString(in); timestamp = readLong(in); // The disk format stores atimes for directories as well. // However, currently this is not being updated/used because of // performance reasons. if (logVersion <= -17) { atime = readLong(in); } if (logVersion <= -11) { permissions = PermissionStatus.read(in); } fsDir.unprotectedMkdir(path, permissions, timestamp); break; } case OP_SET_GENSTAMP: { numOpSetGenStamp++; long lw = in.readLong(); if ((highestGenStamp != -1) && (highestGenStamp + 1 != lw)) { throw new IOException("OP_SET_GENSTAMP tried to set a genstamp of " + lw + " but the previous highest genstamp was " + highestGenStamp); } highestGenStamp = lw; fsDir.namesystem.setGenerationStamp(lw); break; } case OP_DATANODE_ADD: { numOpOther++; FSImage.DatanodeImage nodeimage = new FSImage.DatanodeImage(); nodeimage.readFields(in); //Datnodes are not persistent any more. break; } case OP_DATANODE_REMOVE: { numOpOther++; DatanodeID nodeID = new DatanodeID(); nodeID.readFields(in); //Datanodes are not persistent any more. 
break; } case OP_SET_PERMISSIONS: { numOpSetPerm++; if (logVersion > -11) throw new IOException("Unexpected opcode " + opcode + " for version " + logVersion); fsDir.unprotectedSetPermission( FSImage.readString(in), FsPermission.read(in)); break; } case OP_SET_OWNER: { numOpSetOwner++; if (logVersion > -11) throw new IOException("Unexpected opcode " + opcode + " for version " + logVersion); fsDir.unprotectedSetOwner(FSImage.readString(in), FSImage.readString_EmptyAsNull(in), FSImage.readString_EmptyAsNull(in)); break; } case OP_SET_NS_QUOTA: { if (logVersion > -16) { throw new IOException("Unexpected opcode " + opcode + " for version " + logVersion); } fsDir.unprotectedSetQuota(FSImage.readString(in), readLongWritable(in), FSConstants.QUOTA_DONT_SET); break; } case OP_CLEAR_NS_QUOTA: { if (logVersion > -16) { throw new IOException("Unexpected opcode " + opcode + " for version " + logVersion); } fsDir.unprotectedSetQuota(FSImage.readString(in), FSConstants.QUOTA_RESET, FSConstants.QUOTA_DONT_SET); break; } case OP_SET_QUOTA: fsDir.unprotectedSetQuota(FSImage.readString(in), readLongWritable(in), readLongWritable(in)); break; case OP_TIMES: { numOpTimes++; int length = in.readInt(); if (length != 3) { throw new IOException("Incorrect data format. 
" + "times operation."); } path = FSImage.readString(in); mtime = readLong(in); atime = readLong(in); fsDir.unprotectedSetTimes(path, mtime, atime, true); break; } case OP_GET_DELEGATION_TOKEN: { if (logVersion > -19) { throw new IOException("Unexpected opcode " + opcode + " for version " + logVersion); } numOpGetDelegationToken++; DelegationTokenIdentifier delegationTokenId = new DelegationTokenIdentifier(); delegationTokenId.readFields(in); long expiryTime = readLong(in); fsNamesys.getDelegationTokenSecretManager() .addPersistedDelegationToken(delegationTokenId, expiryTime); break; } case OP_RENEW_DELEGATION_TOKEN: { if (logVersion > -19) { throw new IOException("Unexpected opcode " + opcode + " for version " + logVersion); } numOpRenewDelegationToken++; DelegationTokenIdentifier delegationTokenId = new DelegationTokenIdentifier(); delegationTokenId.readFields(in); long expiryTime = readLong(in); fsNamesys.getDelegationTokenSecretManager() .updatePersistedTokenRenewal(delegationTokenId, expiryTime); break; } case OP_CANCEL_DELEGATION_TOKEN: { if (logVersion > -19) { throw new IOException("Unexpected opcode " + opcode + " for version " + logVersion); } numOpCancelDelegationToken++; DelegationTokenIdentifier delegationTokenId = new DelegationTokenIdentifier(); delegationTokenId.readFields(in); fsNamesys.getDelegationTokenSecretManager() .updatePersistedTokenCancellation(delegationTokenId); break; } case OP_UPDATE_MASTER_KEY: { if (logVersion > -19) { throw new IOException("Unexpected opcode " + opcode + " for version " + logVersion); } numOpUpdateMasterKey++; DelegationKey delegationKey = new DelegationKey(); delegationKey.readFields(in); fsNamesys.getDelegationTokenSecretManager().updatePersistedMasterKey( delegationKey); break; } default: { throw new IOException("Never seen opcode " + opcode); } } } } catch (Throwable t) { String msg = "Failed to parse edit log (" + edits.getName() + ") at position " + tracker.getPos() + ", edit log length is " + edits.length() + 
", opcode=" + opcode + ", isToterationEnabled=" + isToterationEnabled; // Catch Throwable because in the case of a truly corrupt edits log, any // sort of error might be thrown (NumberFormat, NullPointer, EOF, etc.) if (Storage.is203LayoutVersion(logVersion) && logVersion != FSConstants.LAYOUT_VERSION) { // Failed to load 0.20.203 version edits during upgrade. This version has // conflicting opcodes with the later releases. The editlog must be // emptied by restarting the namenode, before proceeding with the upgrade. msg += ": During upgrade, failed to load the editlog version " + logVersion + " from release 0.20.203. Please go back to the old " + " release and restart the namenode. This empties the editlog " + " and saves the namespace. Resume the upgrade after this step."; throw new IOException(msg, t); } if (recentOpcodeOffsets[0] != -1) { Arrays.sort(recentOpcodeOffsets); StringBuilder sb = new StringBuilder(", Recent opcode offsets=[") .append(recentOpcodeOffsets[0]); for (int i = 1; i < recentOpcodeOffsets.length; i++) { if (recentOpcodeOffsets[i] != -1) { sb.append(' ').append(recentOpcodeOffsets[i]); } } msg += sb.append("]"); } LOG.warn(msg, t); if (isToterationEnabled) { in.reset(); //reset to the beginning position of this transaction } else { //edit log toleration feature is disabled MetaRecoveryContext.editLogLoaderPrompt(msg, recovery); } } finally { try { checkEndOfLog(edits, in, tracker, tolerationLength); } finally { in.close(); } } LOG.info("Edits file " + edits.getName() + " of size " + edits.length() + " edits # " + numEdits + " loaded in " + (FSNamesystem.now()-startTime)/1000 + " seconds."); if (LOG.isDebugEnabled()) { LOG.debug("numOpAdd = " + numOpAdd + " numOpClose = " + numOpClose + " numOpDelete = " + numOpDelete + " numOpRename = " + numOpRename + " numOpSetRepl = " + numOpSetRepl + " numOpMkDir = " + numOpMkDir + " numOpSetPerm = " + numOpSetPerm + " numOpSetOwner = " + numOpSetOwner + " numOpSetGenStamp = " + numOpSetGenStamp + " 
numOpTimes = " + numOpTimes + " numOpGetDelegationToken = " + numOpGetDelegationToken + " numOpRenewDelegationToken = " + numOpRenewDelegationToken + " numOpCancelDelegationToken = " + numOpCancelDelegationToken + " numOpUpdateMasterKey = " + numOpUpdateMasterKey + " numOpOther = " + numOpOther); } if (logVersion != FSConstants.LAYOUT_VERSION) // other version numEdits++; // save this image asap return numEdits; }
static int loadFSEdits(EditLogInputStream edits, int tolerationLength, MetaRecoveryContext recovery) throws IOException { FSNamesystem fsNamesys = FSNamesystem.getFSNamesystem(); FSDirectory fsDir = fsNamesys.dir; int numEdits = 0; int logVersion = 0; String clientName = null; String clientMachine = null; String path = null; int numOpAdd = 0, numOpClose = 0, numOpDelete = 0, numOpRename = 0, numOpSetRepl = 0, numOpMkDir = 0, numOpSetPerm = 0, numOpSetOwner = 0, numOpSetGenStamp = 0, numOpTimes = 0, numOpGetDelegationToken = 0, numOpRenewDelegationToken = 0, numOpCancelDelegationToken = 0, numOpUpdateMasterKey = 0, numOpOther = 0; long highestGenStamp = -1; long startTime = FSNamesystem.now(); // Keep track of the file offsets of the last several opcodes. // This is handy when manually recovering corrupted edits files. PositionTrackingInputStream tracker = new PositionTrackingInputStream(new BufferedInputStream(edits)); long recentOpcodeOffsets[] = new long[4]; Arrays.fill(recentOpcodeOffsets, -1); final boolean isTolerationEnabled = tolerationLength >= 0; DataInputStream in = new DataInputStream(tracker); Byte opcode = null; try { // Read log file version. Could be missing. in.mark(4); // If edits log is greater than 2G, available method will return negative // numbers, so we avoid having to call available boolean available = true; try { logVersion = in.readByte(); } catch (EOFException e) { available = false; } if (available) { in.reset(); logVersion = in.readInt(); if (logVersion < FSConstants.LAYOUT_VERSION) // future version throw new IOException( "Unexpected version of the file system log file: " + logVersion + ". 
Current version = " + FSConstants.LAYOUT_VERSION + "."); } assert logVersion <= Storage.LAST_UPGRADABLE_LAYOUT_VERSION : "Unsupported version " + logVersion; while (true) { if (isTolerationEnabled) { //mark position could be reset in case of exceptions in.mark(TRANSACTION_LENGTH_LIMIT); } long timestamp = 0; long mtime = 0; long atime = 0; long blockSize = 0; opcode = null; try { opcode = in.readByte(); if (opcode == OP_INVALID) { LOG.info("Invalid opcode, reached end of edit log " + "Number of transactions found: " + numEdits + ". " + "Bytes read: " + tracker.getPos()); break; // no more transactions } } catch (EOFException e) { LOG.info("Reading " + edits.getName() + ": " + e); break; // no more transactions } recentOpcodeOffsets[numEdits % recentOpcodeOffsets.length] = tracker.getPos(); numEdits++; switch (opcode) { case OP_ADD: case OP_CLOSE: { // versions > 0 support per file replication // get name and replication int length = in.readInt(); if (-7 == logVersion && length != 3|| -17 < logVersion && logVersion < -7 && length != 4 || logVersion <= -17 && length != 5) { throw new IOException("Incorrect data format." + " logVersion is " + logVersion + " but writables.length is " + length + ". "); } path = FSImage.readString(in); short replication = adjustReplication(readShort(in)); mtime = readLong(in); if (logVersion <= -17) { atime = readLong(in); } if (logVersion < -7) { blockSize = readLong(in); } // get blocks Block blocks[] = null; if (logVersion <= -14) { blocks = readBlocks(in); } else { BlockTwo oldblk = new BlockTwo(); int num = in.readInt(); blocks = new Block[num]; for (int i = 0; i < num; i++) { oldblk.readFields(in); blocks[i] = new Block(oldblk.blkid, oldblk.len, Block.GRANDFATHER_GENERATION_STAMP); } } // Older versions of HDFS does not store the block size in inode. // If the file has more than one block, use the size of the // first block as the blocksize. Otherwise use the default // block size. 
if (-8 <= logVersion && blockSize == 0) { if (blocks.length > 1) { blockSize = blocks[0].getNumBytes(); } else { long first = ((blocks.length == 1)? blocks[0].getNumBytes(): 0); blockSize = Math.max(fsNamesys.getDefaultBlockSize(), first); } } PermissionStatus permissions = fsNamesys.getUpgradePermission(); if (logVersion <= -11) { permissions = PermissionStatus.read(in); } // clientname, clientMachine and block locations of last block. if (opcode == OP_ADD && logVersion <= -12) { clientName = FSImage.readString(in); clientMachine = FSImage.readString(in); if (-13 <= logVersion) { readDatanodeDescriptorArray(in); } } else { clientName = ""; clientMachine = ""; } // The open lease transaction re-creates a file if necessary. // Delete the file if it already exists. if (LOG.isDebugEnabled()) { LOG.debug(opcode + ": " + path + " numblocks : " + blocks.length + " clientHolder " + clientName + " clientMachine " + clientMachine); } fsDir.unprotectedDelete(path, mtime); // add to the file tree INodeFile node = (INodeFile)fsDir.unprotectedAddFile( path, permissions, blocks, replication, mtime, atime, blockSize); if (opcode == OP_ADD) { numOpAdd++; // // Replace current node with a INodeUnderConstruction. // Recreate in-memory lease record. // INodeFileUnderConstruction cons = new INodeFileUnderConstruction( node.getLocalNameBytes(), node.getReplication(), node.getModificationTime(), node.getPreferredBlockSize(), node.getBlocks(), node.getPermissionStatus(), clientName, clientMachine, null); fsDir.replaceNode(path, node, cons); fsNamesys.leaseManager.addLease(cons.clientName, path); } break; } case OP_SET_REPLICATION: { numOpSetRepl++; path = FSImage.readString(in); short replication = adjustReplication(readShort(in)); fsDir.unprotectedSetReplication(path, replication, null); break; } case OP_RENAME: { numOpRename++; int length = in.readInt(); if (length != 3) { throw new IOException("Incorrect data format. 
" + "Mkdir operation."); } String s = FSImage.readString(in); String d = FSImage.readString(in); timestamp = readLong(in); HdfsFileStatus dinfo = fsDir.getFileInfo(d); fsDir.unprotectedRenameTo(s, d, timestamp); fsNamesys.changeLease(s, d, dinfo); break; } case OP_DELETE: { numOpDelete++; int length = in.readInt(); if (length != 2) { throw new IOException("Incorrect data format. " + "delete operation."); } path = FSImage.readString(in); timestamp = readLong(in); fsDir.unprotectedDelete(path, timestamp); break; } case OP_MKDIR: { numOpMkDir++; PermissionStatus permissions = fsNamesys.getUpgradePermission(); int length = in.readInt(); if (-17 < logVersion && length != 2 || logVersion <= -17 && length != 3) { throw new IOException("Incorrect data format. " + "Mkdir operation."); } path = FSImage.readString(in); timestamp = readLong(in); // The disk format stores atimes for directories as well. // However, currently this is not being updated/used because of // performance reasons. if (logVersion <= -17) { atime = readLong(in); } if (logVersion <= -11) { permissions = PermissionStatus.read(in); } fsDir.unprotectedMkdir(path, permissions, timestamp); break; } case OP_SET_GENSTAMP: { numOpSetGenStamp++; long lw = in.readLong(); if ((highestGenStamp != -1) && (highestGenStamp + 1 != lw)) { throw new IOException("OP_SET_GENSTAMP tried to set a genstamp of " + lw + " but the previous highest genstamp was " + highestGenStamp); } highestGenStamp = lw; fsDir.namesystem.setGenerationStamp(lw); break; } case OP_DATANODE_ADD: { numOpOther++; FSImage.DatanodeImage nodeimage = new FSImage.DatanodeImage(); nodeimage.readFields(in); //Datnodes are not persistent any more. break; } case OP_DATANODE_REMOVE: { numOpOther++; DatanodeID nodeID = new DatanodeID(); nodeID.readFields(in); //Datanodes are not persistent any more. 
break; } case OP_SET_PERMISSIONS: { numOpSetPerm++; if (logVersion > -11) throw new IOException("Unexpected opcode " + opcode + " for version " + logVersion); fsDir.unprotectedSetPermission( FSImage.readString(in), FsPermission.read(in)); break; } case OP_SET_OWNER: { numOpSetOwner++; if (logVersion > -11) throw new IOException("Unexpected opcode " + opcode + " for version " + logVersion); fsDir.unprotectedSetOwner(FSImage.readString(in), FSImage.readString_EmptyAsNull(in), FSImage.readString_EmptyAsNull(in)); break; } case OP_SET_NS_QUOTA: { if (logVersion > -16) { throw new IOException("Unexpected opcode " + opcode + " for version " + logVersion); } fsDir.unprotectedSetQuota(FSImage.readString(in), readLongWritable(in), FSConstants.QUOTA_DONT_SET); break; } case OP_CLEAR_NS_QUOTA: { if (logVersion > -16) { throw new IOException("Unexpected opcode " + opcode + " for version " + logVersion); } fsDir.unprotectedSetQuota(FSImage.readString(in), FSConstants.QUOTA_RESET, FSConstants.QUOTA_DONT_SET); break; } case OP_SET_QUOTA: fsDir.unprotectedSetQuota(FSImage.readString(in), readLongWritable(in), readLongWritable(in)); break; case OP_TIMES: { numOpTimes++; int length = in.readInt(); if (length != 3) { throw new IOException("Incorrect data format. 
" + "times operation."); } path = FSImage.readString(in); mtime = readLong(in); atime = readLong(in); fsDir.unprotectedSetTimes(path, mtime, atime, true); break; } case OP_GET_DELEGATION_TOKEN: { if (logVersion > -19) { throw new IOException("Unexpected opcode " + opcode + " for version " + logVersion); } numOpGetDelegationToken++; DelegationTokenIdentifier delegationTokenId = new DelegationTokenIdentifier(); delegationTokenId.readFields(in); long expiryTime = readLong(in); fsNamesys.getDelegationTokenSecretManager() .addPersistedDelegationToken(delegationTokenId, expiryTime); break; } case OP_RENEW_DELEGATION_TOKEN: { if (logVersion > -19) { throw new IOException("Unexpected opcode " + opcode + " for version " + logVersion); } numOpRenewDelegationToken++; DelegationTokenIdentifier delegationTokenId = new DelegationTokenIdentifier(); delegationTokenId.readFields(in); long expiryTime = readLong(in); fsNamesys.getDelegationTokenSecretManager() .updatePersistedTokenRenewal(delegationTokenId, expiryTime); break; } case OP_CANCEL_DELEGATION_TOKEN: { if (logVersion > -19) { throw new IOException("Unexpected opcode " + opcode + " for version " + logVersion); } numOpCancelDelegationToken++; DelegationTokenIdentifier delegationTokenId = new DelegationTokenIdentifier(); delegationTokenId.readFields(in); fsNamesys.getDelegationTokenSecretManager() .updatePersistedTokenCancellation(delegationTokenId); break; } case OP_UPDATE_MASTER_KEY: { if (logVersion > -19) { throw new IOException("Unexpected opcode " + opcode + " for version " + logVersion); } numOpUpdateMasterKey++; DelegationKey delegationKey = new DelegationKey(); delegationKey.readFields(in); fsNamesys.getDelegationTokenSecretManager().updatePersistedMasterKey( delegationKey); break; } default: { throw new IOException("Never seen opcode " + opcode); } } } } catch (Throwable t) { String msg = "Failed to parse edit log (" + edits.getName() + ") at position " + tracker.getPos() + ", edit log length is " + edits.length() + 
", opcode=" + opcode + ", isTolerationEnabled=" + isTolerationEnabled; // Catch Throwable because in the case of a truly corrupt edits log, any // sort of error might be thrown (NumberFormat, NullPointer, EOF, etc.) if (Storage.is203LayoutVersion(logVersion) && logVersion != FSConstants.LAYOUT_VERSION) { // Failed to load 0.20.203 version edits during upgrade. This version has // conflicting opcodes with the later releases. The editlog must be // emptied by restarting the namenode, before proceeding with the upgrade. msg += ": During upgrade, failed to load the editlog version " + logVersion + " from release 0.20.203. Please go back to the old " + " release and restart the namenode. This empties the editlog " + " and saves the namespace. Resume the upgrade after this step."; throw new IOException(msg, t); } if (recentOpcodeOffsets[0] != -1) { Arrays.sort(recentOpcodeOffsets); StringBuilder sb = new StringBuilder(", Recent opcode offsets=[") .append(recentOpcodeOffsets[0]); for (int i = 1; i < recentOpcodeOffsets.length; i++) { if (recentOpcodeOffsets[i] != -1) { sb.append(' ').append(recentOpcodeOffsets[i]); } } msg += sb.append("]"); } LOG.warn(msg, t); if (isTolerationEnabled) { in.reset(); //reset to the beginning position of this transaction } else { //edit log toleration feature is disabled MetaRecoveryContext.editLogLoaderPrompt(msg, recovery); } } finally { try { checkEndOfLog(edits, in, tracker, tolerationLength); } finally { in.close(); } } LOG.info("Edits file " + edits.getName() + " of size " + edits.length() + " edits # " + numEdits + " loaded in " + (FSNamesystem.now()-startTime)/1000 + " seconds."); if (LOG.isDebugEnabled()) { LOG.debug("numOpAdd = " + numOpAdd + " numOpClose = " + numOpClose + " numOpDelete = " + numOpDelete + " numOpRename = " + numOpRename + " numOpSetRepl = " + numOpSetRepl + " numOpMkDir = " + numOpMkDir + " numOpSetPerm = " + numOpSetPerm + " numOpSetOwner = " + numOpSetOwner + " numOpSetGenStamp = " + numOpSetGenStamp + " 
numOpTimes = " + numOpTimes + " numOpGetDelegationToken = " + numOpGetDelegationToken + " numOpRenewDelegationToken = " + numOpRenewDelegationToken + " numOpCancelDelegationToken = " + numOpCancelDelegationToken + " numOpUpdateMasterKey = " + numOpUpdateMasterKey + " numOpOther = " + numOpOther); } if (logVersion != FSConstants.LAYOUT_VERSION) // other version numEdits++; // save this image asap return numEdits; }
diff --git a/src/main/java/fr/noogotte/useful_commands/exception/PlayerNotInServer.java b/src/main/java/fr/noogotte/useful_commands/exception/PlayerNotInServer.java index 5c47916..8760a7d 100644 --- a/src/main/java/fr/noogotte/useful_commands/exception/PlayerNotInServer.java +++ b/src/main/java/fr/noogotte/useful_commands/exception/PlayerNotInServer.java @@ -1,13 +1,13 @@ package fr.noogotte.useful_commands.exception; import fr.aumgn.bukkitutils.command.exception.CommandError; public class PlayerNotInServer extends CommandError { private static final long serialVersionUID = -3139364881212075182L; public PlayerNotInServer() { - super("Ce joueur n'est pas dans la partie."); + super("Ce joueur n'est pas sur le serveur."); } }
true
true
public PlayerNotInServer() { super("Ce joueur n'est pas dans la partie."); }
public PlayerNotInServer() { super("Ce joueur n'est pas sur le serveur."); }
diff --git a/src/main/java/com/github/julman99/gsonfire/postProcessors/MergeMapPostProcessor.java b/src/main/java/com/github/julman99/gsonfire/postProcessors/MergeMapPostProcessor.java index 78b52dd..69439b1 100644 --- a/src/main/java/com/github/julman99/gsonfire/postProcessors/MergeMapPostProcessor.java +++ b/src/main/java/com/github/julman99/gsonfire/postProcessors/MergeMapPostProcessor.java @@ -1,41 +1,44 @@ package com.github.julman99.gsonfire.postProcessors; import com.github.julman99.gsonfire.PostProcessor; import com.github.julman99.gsonfire.annotations.MergeMap; import com.github.julman99.gsonfire.gson.FieldInspector; import com.google.gson.Gson; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import java.lang.reflect.Field; import java.util.Map; /** * @autor: julio */ public class MergeMapPostProcessor implements PostProcessor { private FieldInspector fieldInspector = new FieldInspector(); @Override public void postDeserialize(Object result, JsonElement src, Gson gson) { //nothing } @Override public void postSerialize(JsonElement result, Object src, Gson gson) { + if(src == null){ + return; + } for(Field f: fieldInspector.getAnnotatedFields(src.getClass(), MergeMap.class)){ try { Map map = (Map)f.get(src); JsonObject resultJsonObject = result.getAsJsonObject(); //Walk the map and merge it with the json object for (Map.Entry<String, JsonElement> entry: gson.toJsonTree(map).getAsJsonObject().entrySet()){ resultJsonObject.add(entry.getKey(), entry.getValue()); } } catch (IllegalAccessException e) { throw new RuntimeException(e); } } } }
true
true
public void postSerialize(JsonElement result, Object src, Gson gson) { for(Field f: fieldInspector.getAnnotatedFields(src.getClass(), MergeMap.class)){ try { Map map = (Map)f.get(src); JsonObject resultJsonObject = result.getAsJsonObject(); //Walk the map and merge it with the json object for (Map.Entry<String, JsonElement> entry: gson.toJsonTree(map).getAsJsonObject().entrySet()){ resultJsonObject.add(entry.getKey(), entry.getValue()); } } catch (IllegalAccessException e) { throw new RuntimeException(e); } } }
public void postSerialize(JsonElement result, Object src, Gson gson) { if(src == null){ return; } for(Field f: fieldInspector.getAnnotatedFields(src.getClass(), MergeMap.class)){ try { Map map = (Map)f.get(src); JsonObject resultJsonObject = result.getAsJsonObject(); //Walk the map and merge it with the json object for (Map.Entry<String, JsonElement> entry: gson.toJsonTree(map).getAsJsonObject().entrySet()){ resultJsonObject.add(entry.getKey(), entry.getValue()); } } catch (IllegalAccessException e) { throw new RuntimeException(e); } } }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppendRestart.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppendRestart.java index 816332d0a7..e10eab8c57 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppendRestart.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppendRestart.java @@ -1,176 +1,179 @@ /** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.apache.hadoop.hdfs; import static org.junit.Assert.assertEquals; import java.io.File; import java.io.IOException; import java.util.EnumMap; import java.util.Random; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption; import org.apache.hadoop.hdfs.server.namenode.FSEditLog; import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp; import org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes; import org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil; import org.apache.hadoop.hdfs.server.namenode.NNStorage; import org.apache.hadoop.hdfs.util.Holder; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.test.GenericTestUtils; import org.junit.Test; /** * Unit test to make sure that Append properly logs the right * things to the edit log, such that files aren't lost or truncated * on restart. */ public class TestFileAppendRestart { private static final int BLOCK_SIZE = 4096; private static final String HADOOP_23_BROKEN_APPEND_TGZ = "image-with-buggy-append.tgz"; private void writeAndAppend(FileSystem fs, Path p, int lengthForCreate, int lengthForAppend) throws IOException { // Creating a file with 4096 blockSize to write multiple blocks FSDataOutputStream stream = fs.create( p, true, BLOCK_SIZE, (short) 1, BLOCK_SIZE); try { AppendTestUtil.write(stream, 0, lengthForCreate); stream.close(); stream = fs.append(p); AppendTestUtil.write(stream, lengthForCreate, lengthForAppend); stream.close(); } finally { IOUtils.closeStream(stream); } int totalLength = lengthForCreate + lengthForAppend; assertEquals(totalLength, fs.getFileStatus(p).getLen()); } /** * Regression test for HDFS-2991. Creates and appends to files * where blocks start/end on block boundaries. 
*/ @Test public void testAppendRestart() throws Exception { final Configuration conf = new HdfsConfiguration(); // Turn off persistent IPC, so that the DFSClient can survive NN restart conf.setInt( CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY, 0); MiniDFSCluster cluster = null; FSDataOutputStream stream = null; try { cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build(); FileSystem fs = cluster.getFileSystem(); File editLog = new File(FSImageTestUtil.getNameNodeCurrentDirs(cluster, 0).get(0), NNStorage.getInProgressEditsFileName(1)); EnumMap<FSEditLogOpCodes, Holder<Integer>> counts; Path p1 = new Path("/block-boundaries"); writeAndAppend(fs, p1, BLOCK_SIZE, BLOCK_SIZE); counts = FSImageTestUtil.countEditLogOpTypes(editLog); // OP_ADD to create file - // OP_ADD for first block + // OP_UPDATE_BLOCKS for first block // OP_CLOSE to close file // OP_ADD to reopen file - // OP_ADD for second block + // OP_UPDATE_BLOCKS for second block // OP_CLOSE to close file - assertEquals(4, (int)counts.get(FSEditLogOpCodes.OP_ADD).held); + assertEquals(2, (int)counts.get(FSEditLogOpCodes.OP_ADD).held); + assertEquals(2, (int)counts.get(FSEditLogOpCodes.OP_UPDATE_BLOCKS).held); assertEquals(2, (int)counts.get(FSEditLogOpCodes.OP_CLOSE).held); Path p2 = new Path("/not-block-boundaries"); writeAndAppend(fs, p2, BLOCK_SIZE/2, BLOCK_SIZE); counts = FSImageTestUtil.countEditLogOpTypes(editLog); // OP_ADD to create file - // OP_ADD for first block + // OP_UPDATE_BLOCKS for first block // OP_CLOSE to close file // OP_ADD to re-establish the lease - // OP_ADD from the updatePipeline call (increments genstamp of last block) - // OP_ADD at the start of the second block + // OP_UPDATE_BLOCKS from the updatePipeline call (increments genstamp of last block) + // OP_UPDATE_BLOCKS at the start of the second block // OP_CLOSE to close file - // Total: 5 OP_ADDs and 2 OP_CLOSEs in addition to the ones above - assertEquals(9, 
(int)counts.get(FSEditLogOpCodes.OP_ADD).held); - assertEquals(4, (int)counts.get(FSEditLogOpCodes.OP_CLOSE).held); + // Total: 2 OP_ADDs, 3 OP_UPDATE_BLOCKS, and 2 OP_CLOSEs in addition + // to the ones above + assertEquals(2+2, (int)counts.get(FSEditLogOpCodes.OP_ADD).held); + assertEquals(2+3, (int)counts.get(FSEditLogOpCodes.OP_UPDATE_BLOCKS).held); + assertEquals(2+2, (int)counts.get(FSEditLogOpCodes.OP_CLOSE).held); cluster.restartNameNode(); AppendTestUtil.check(fs, p1, 2*BLOCK_SIZE); AppendTestUtil.check(fs, p2, 3*BLOCK_SIZE/2); } finally { IOUtils.closeStream(stream); if (cluster != null) { cluster.shutdown(); } } } /** * Earlier versions of HDFS had a bug (HDFS-2991) which caused * append(), when called exactly at a block boundary, * to not log an OP_ADD. This ensures that we can read from * such buggy versions correctly, by loading an image created * using a namesystem image created with 0.23.1-rc2 exhibiting * the issue. */ @Test public void testLoadLogsFromBuggyEarlierVersions() throws IOException { final Configuration conf = new HdfsConfiguration(); String tarFile = System.getProperty("test.cache.data", "build/test/cache") + "/" + HADOOP_23_BROKEN_APPEND_TGZ; String testDir = System.getProperty("test.build.data", "build/test/data"); File dfsDir = new File(testDir, "image-with-buggy-append"); if (dfsDir.exists() && !FileUtil.fullyDelete(dfsDir)) { throw new IOException("Could not delete dfs directory '" + dfsDir + "'"); } FileUtil.unTar(new File(tarFile), new File(testDir)); File nameDir = new File(dfsDir, "name"); GenericTestUtils.assertExists(nameDir); conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY, nameDir.getAbsolutePath()); MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0) .format(false) .manageDataDfsDirs(false) .manageNameDfsDirs(false) .numDataNodes(0) .waitSafeMode(false) .startupOption(StartupOption.UPGRADE) .build(); try { FileSystem fs = cluster.getFileSystem(); Path testPath = new Path("/tmp/io_data/test_io_0"); 
assertEquals(2*1024*1024, fs.getFileStatus(testPath).getLen()); } finally { cluster.shutdown(); } } }
false
true
public void testAppendRestart() throws Exception { final Configuration conf = new HdfsConfiguration(); // Turn off persistent IPC, so that the DFSClient can survive NN restart conf.setInt( CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY, 0); MiniDFSCluster cluster = null; FSDataOutputStream stream = null; try { cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build(); FileSystem fs = cluster.getFileSystem(); File editLog = new File(FSImageTestUtil.getNameNodeCurrentDirs(cluster, 0).get(0), NNStorage.getInProgressEditsFileName(1)); EnumMap<FSEditLogOpCodes, Holder<Integer>> counts; Path p1 = new Path("/block-boundaries"); writeAndAppend(fs, p1, BLOCK_SIZE, BLOCK_SIZE); counts = FSImageTestUtil.countEditLogOpTypes(editLog); // OP_ADD to create file // OP_ADD for first block // OP_CLOSE to close file // OP_ADD to reopen file // OP_ADD for second block // OP_CLOSE to close file assertEquals(4, (int)counts.get(FSEditLogOpCodes.OP_ADD).held); assertEquals(2, (int)counts.get(FSEditLogOpCodes.OP_CLOSE).held); Path p2 = new Path("/not-block-boundaries"); writeAndAppend(fs, p2, BLOCK_SIZE/2, BLOCK_SIZE); counts = FSImageTestUtil.countEditLogOpTypes(editLog); // OP_ADD to create file // OP_ADD for first block // OP_CLOSE to close file // OP_ADD to re-establish the lease // OP_ADD from the updatePipeline call (increments genstamp of last block) // OP_ADD at the start of the second block // OP_CLOSE to close file // Total: 5 OP_ADDs and 2 OP_CLOSEs in addition to the ones above assertEquals(9, (int)counts.get(FSEditLogOpCodes.OP_ADD).held); assertEquals(4, (int)counts.get(FSEditLogOpCodes.OP_CLOSE).held); cluster.restartNameNode(); AppendTestUtil.check(fs, p1, 2*BLOCK_SIZE); AppendTestUtil.check(fs, p2, 3*BLOCK_SIZE/2); } finally { IOUtils.closeStream(stream); if (cluster != null) { cluster.shutdown(); } } }
public void testAppendRestart() throws Exception { final Configuration conf = new HdfsConfiguration(); // Turn off persistent IPC, so that the DFSClient can survive NN restart conf.setInt( CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY, 0); MiniDFSCluster cluster = null; FSDataOutputStream stream = null; try { cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build(); FileSystem fs = cluster.getFileSystem(); File editLog = new File(FSImageTestUtil.getNameNodeCurrentDirs(cluster, 0).get(0), NNStorage.getInProgressEditsFileName(1)); EnumMap<FSEditLogOpCodes, Holder<Integer>> counts; Path p1 = new Path("/block-boundaries"); writeAndAppend(fs, p1, BLOCK_SIZE, BLOCK_SIZE); counts = FSImageTestUtil.countEditLogOpTypes(editLog); // OP_ADD to create file // OP_UPDATE_BLOCKS for first block // OP_CLOSE to close file // OP_ADD to reopen file // OP_UPDATE_BLOCKS for second block // OP_CLOSE to close file assertEquals(2, (int)counts.get(FSEditLogOpCodes.OP_ADD).held); assertEquals(2, (int)counts.get(FSEditLogOpCodes.OP_UPDATE_BLOCKS).held); assertEquals(2, (int)counts.get(FSEditLogOpCodes.OP_CLOSE).held); Path p2 = new Path("/not-block-boundaries"); writeAndAppend(fs, p2, BLOCK_SIZE/2, BLOCK_SIZE); counts = FSImageTestUtil.countEditLogOpTypes(editLog); // OP_ADD to create file // OP_UPDATE_BLOCKS for first block // OP_CLOSE to close file // OP_ADD to re-establish the lease // OP_UPDATE_BLOCKS from the updatePipeline call (increments genstamp of last block) // OP_UPDATE_BLOCKS at the start of the second block // OP_CLOSE to close file // Total: 2 OP_ADDs, 3 OP_UPDATE_BLOCKS, and 2 OP_CLOSEs in addition // to the ones above assertEquals(2+2, (int)counts.get(FSEditLogOpCodes.OP_ADD).held); assertEquals(2+3, (int)counts.get(FSEditLogOpCodes.OP_UPDATE_BLOCKS).held); assertEquals(2+2, (int)counts.get(FSEditLogOpCodes.OP_CLOSE).held); cluster.restartNameNode(); AppendTestUtil.check(fs, p1, 2*BLOCK_SIZE); AppendTestUtil.check(fs, p2, 3*BLOCK_SIZE/2); } 
finally { IOUtils.closeStream(stream); if (cluster != null) { cluster.shutdown(); } } }
diff --git a/extras/src/kg/apc/jmeter/config/VariablesFromCSV.java b/extras/src/kg/apc/jmeter/config/VariablesFromCSV.java index 38d95574..6f981e40 100644 --- a/extras/src/kg/apc/jmeter/config/VariablesFromCSV.java +++ b/extras/src/kg/apc/jmeter/config/VariablesFromCSV.java @@ -1,78 +1,77 @@ package kg.apc.jmeter.config; import java.util.Iterator; import java.util.Map; import org.apache.jmeter.config.Arguments; /** * * @author Stephane Hoblingre */ public class VariablesFromCSV extends Arguments { public static final String VARIABLE_PREFIX = "variablesPrefix"; public static final String FILENAME = "filename"; public static final String SEPARATOR = "delimiter"; public static final String SKIP_LINES = "skipLines"; public static final int SKIP_LINES_DEFAULT = 0; public static final String STORE_SYS_PROP = "storeSysProp"; // It seems org.apache.jmeter.engine.Precompiler requires only this // https://groups.google.com/forum/#!topic/jmeter-plugins/gWn7MTgvTfE method @Override public Map<String, String> getArgumentsAsMap() { Map<String, String> variables = new VariableFromCsvFileReader(getFileName()).getDataAsMap(getVariablePrefix(), getSeparator(), getSkipLines()); //store in System Properties also if (isStoreAsSystemProperty()) { - Iterator<String> iter = variables.keySet().iterator(); - while (iter.hasNext()) { - String variable = iter.next(); + for (Map.Entry<String, String> element : variables.entrySet()) { + String variable = element.getKey(); if (System.getProperty(variable) == null) { - System.setProperty(variable, variables.get(variable)); + System.setProperty(variable, element.getValue()); } } } return variables; } public String getVariablePrefix() { return getPropertyAsString(VARIABLE_PREFIX); } public void setVariablePrefix(String prefix) { setProperty(VARIABLE_PREFIX, prefix); } public String getFileName() { return getPropertyAsString(FILENAME); } public void setFileName(String filename) { setProperty(FILENAME, filename); } public String getSeparator() { 
return getPropertyAsString(SEPARATOR); } public void setSeparator(String separator) { setProperty(SEPARATOR, separator); } public int getSkipLines() { return getPropertyAsInt(SKIP_LINES, SKIP_LINES_DEFAULT); } public void setSkipLines(int skipLines) { setProperty(SKIP_LINES, skipLines); } public boolean isStoreAsSystemProperty() { return getPropertyAsBoolean(STORE_SYS_PROP); } public void setStoreAsSystemProperty(boolean storeAsSysProp) { setProperty(STORE_SYS_PROP, storeAsSysProp); } }
false
true
public Map<String, String> getArgumentsAsMap() { Map<String, String> variables = new VariableFromCsvFileReader(getFileName()).getDataAsMap(getVariablePrefix(), getSeparator(), getSkipLines()); //store in System Properties also if (isStoreAsSystemProperty()) { Iterator<String> iter = variables.keySet().iterator(); while (iter.hasNext()) { String variable = iter.next(); if (System.getProperty(variable) == null) { System.setProperty(variable, variables.get(variable)); } } } return variables; }
public Map<String, String> getArgumentsAsMap() { Map<String, String> variables = new VariableFromCsvFileReader(getFileName()).getDataAsMap(getVariablePrefix(), getSeparator(), getSkipLines()); //store in System Properties also if (isStoreAsSystemProperty()) { for (Map.Entry<String, String> element : variables.entrySet()) { String variable = element.getKey(); if (System.getProperty(variable) == null) { System.setProperty(variable, element.getValue()); } } } return variables; }
diff --git a/htroot/ConfigUpdate_p.java b/htroot/ConfigUpdate_p.java index fbdf11854..139f292dd 100644 --- a/htroot/ConfigUpdate_p.java +++ b/htroot/ConfigUpdate_p.java @@ -1,236 +1,236 @@ // ConfigUpdate_p.java // (C) 2007 by Michael Peter Christen; [email protected], Frankfurt a. M., Germany // first published 11.07.2007 on http://yacy.net // // This is a part of YaCy, a peer-to-peer based web search engine // // $LastChangedDate: 2006-04-02 22:40:07 +0200 (So, 02 Apr 2006) $ // $LastChangedRevision: 1986 $ // $LastChangedBy: orbiter $ // // LICENSE // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation; either version 2 of the License, or // (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. 
// // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA import java.io.File; import java.io.IOException; import java.util.Date; import java.util.Iterator; import java.util.TreeSet; import de.anomic.http.httpHeader; import de.anomic.plasma.plasmaSwitchboard; import de.anomic.server.serverObjects; import de.anomic.server.serverSwitch; import de.anomic.server.serverSystem; import de.anomic.yacy.yacyURL; import de.anomic.yacy.yacyVersion; public class ConfigUpdate_p { public static serverObjects respond(httpHeader header, serverObjects post, serverSwitch<?> env) { // return variable that accumulates replacements final serverObjects prop = new serverObjects(); final plasmaSwitchboard sb = (plasmaSwitchboard) env; prop.put("candeploy_configCommit", "0"); prop.put("candeploy_autoUpdate", "0"); if (post != null) { if (post.containsKey("update")) { prop.put("forwardToSteering", "1"); prop.put("forwardToSteering_release",post.get("releaseinstall", "")); prop.put("deploys", "1"); prop.put("candeploy", "2"); // display nothing else return prop; } if (post.containsKey("downloadRelease")) { // download a release String release = post.get("releasedownload", ""); if (release.length() > 0) { try { yacyVersion.downloadRelease(new yacyVersion(new yacyURL(release, null))); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } } if (post.containsKey("checkRelease")) { yacyVersion.allReleases(true); } if (post.containsKey("deleteRelease")) { String release = post.get("releaseinstall", ""); if(release.length() > 0) { try { new File(sb.releasePath, release).delete(); } catch (NullPointerException e) { sb.getLog().logSevere("AUTO-UPDATE: could not delete release " + release + ": " + e.getMessage()); } } } if (post.containsKey("autoUpdate")) { yacyVersion updateVersion = 
yacyVersion.rulebasedUpdateInfo(true); if (updateVersion == null) { prop.put("candeploy_autoUpdate", "2"); // no more recent release found } else { // there is a version that is more recent. Load it and re-start with it sb.getLog().logInfo("AUTO-UPDATE: downloading more recent release " + updateVersion.url); File downloaded = yacyVersion.downloadRelease(updateVersion); prop.put("candeploy_autoUpdate_downloadedRelease", updateVersion.name); boolean devenvironment = yacyVersion.combined2prettyVersion(sb.getConfig("version","0.1")).startsWith("dev"); if (devenvironment) { sb.getLog().logInfo("AUTO-UPDATE: omiting update because this is a development environment"); prop.put("candeploy_autoUpdate", "3"); } else if ((downloaded == null) || (!downloaded.exists()) || (downloaded.length() == 0)) { sb.getLog().logInfo("AUTO-UPDATE: omiting update because download failed (file cannot be found or is too small)"); prop.put("candeploy_autoUpdate", "4"); } else { yacyVersion.deployRelease(downloaded); sb.terminate(5000); sb.getLog().logInfo("AUTO-UPDATE: deploy and restart initiated"); prop.put("candeploy_autoUpdate", "1"); } } } if (post.containsKey("configSubmit")) { prop.put("candeploy_configCommit", "1"); sb.setConfig("update.process", (post.get("updateMode", "manual").equals("manual")) ? "manual" : "auto"); sb.setConfig("update.cycle", Math.max(12, post.getLong("cycle", 168))); sb.setConfig("update.blacklist", post.get("blacklist", "")); sb.setConfig("update.concept", (post.get("releaseType", "any").equals("any")) ? "any" : "main"); } } // set if this should be visible if (serverSystem.canExecUnix || serverSystem.isWindows) { // we can deploy a new system with (i.e.) 
// cd DATA/RELEASE;tar xfz $1;cp -Rf yacy/* ../../;rm -Rf yacy prop.put("candeploy", "1"); } else { prop.put("candeploy", "0"); } // version information String versionstring = yacyVersion.combined2prettyVersion(sb.getConfig("version","0.1")); prop.put("candeploy_versionpp", versionstring); boolean devenvironment = versionstring.startsWith("dev"); double thisVersion = Double.parseDouble(sb.getConfig("version","0.1")); // cut off the SVN Rev in the Version try {thisVersion = Math.round(thisVersion*1000.0)/1000.0;} catch (NumberFormatException e) {} // list downloaded releases yacyVersion release, dflt; String[] downloaded = sb.releasePath.list(); prop.put("candeploy_deployenabled", (downloaded.length == 0) ? "0" : ((devenvironment) ? "1" : "2")); // prevent that a developer-version is over-deployed TreeSet<yacyVersion> downloadedreleases = new TreeSet<yacyVersion>(); for (int j = 0; j < downloaded.length; j++) { try { release = new yacyVersion(downloaded[j]); downloadedreleases.add(release); } catch (RuntimeException e) { // not a valid release // can be also a restart- or deploy-file File invalid = new File(sb.releasePath, downloaded[j]); - if (!invalid.getName().contains(".bat")) // Windows doesn't like deleted scripts + if (!(invalid.getName().endsWith(".bat") || invalid.getName().endsWith(".sh"))) // Windows & Linux don't like deleted scripts while execution! invalid.deleteOnExit(); } } dflt = (downloadedreleases.size() == 0) ? null : downloadedreleases.last(); Iterator<yacyVersion> i = downloadedreleases.iterator(); int relcount = 0; while (i.hasNext()) { release = i.next(); prop.put("candeploy_downloadedreleases_" + relcount + "_name", ((release.mainRelease) ? "main" : "dev") + " " + release.releaseNr + "/" + release.svn); prop.put("candeploy_downloadedreleases_" + relcount + "_file", release.name); prop.put("candeploy_downloadedreleases_" + relcount + "_selected", (release == dflt) ? 
"1" : "0"); relcount++; } prop.put("candeploy_downloadedreleases", relcount); // list remotely available releases yacyVersion.DevMain releasess = yacyVersion.allReleases(false); relcount = 0; // main TreeSet<yacyVersion> releases = releasess.main; releases.removeAll(downloadedreleases); i = releases.iterator(); while (i.hasNext()) { release = i.next(); prop.put("candeploy_availreleases_" + relcount + "_name", ((release.mainRelease) ? "main" : "dev") + " " + release.releaseNr + "/" + release.svn); prop.put("candeploy_availreleases_" + relcount + "_url", release.url.toString()); prop.put("candeploy_availreleases_" + relcount + "_selected", "0"); relcount++; } // dev dflt = (releasess.dev.size() == 0) ? null : releasess.dev.last(); releases = releasess.dev; releases.removeAll(downloadedreleases); i = releases.iterator(); while (i.hasNext()) { release = i.next(); prop.put("candeploy_availreleases_" + relcount + "_name", ((release.mainRelease) ? "main" : "dev") + " " + release.releaseNr + "/" + release.svn); prop.put("candeploy_availreleases_" + relcount + "_url", release.url.toString()); prop.put("candeploy_availreleases_" + relcount + "_selected", (release == dflt) ? "1" : "0"); relcount++; } prop.put("candeploy_availreleases", relcount); // properties for automated system update prop.put("candeploy_manualUpdateChecked", (sb.getConfig("update.process", "manual").equals("manual")) ? "1" : "0"); prop.put("candeploy_autoUpdateChecked", (sb.getConfig("update.process", "manual").equals("auto")) ? "1" : "0"); prop.put("candeploy_cycle", sb.getConfigLong("update.cycle", 168)); prop.put("candeploy_blacklist", sb.getConfig("update.blacklist", "")); prop.put("candeploy_releaseTypeMainChecked", (sb.getConfig("update.concept", "any").equals("any")) ? "0" : "1"); prop.put("candeploy_releaseTypeAnyChecked", (sb.getConfig("update.concept", "any").equals("any")) ? "1" : "0"); prop.put("candeploy_lastlookup", (sb.getConfigLong("update.time.lookup", 0) == 0) ? 
"0" : "1"); prop.put("candeploy_lastlookup_time", new Date(sb.getConfigLong("update.time.lookup", 0)).toString()); prop.put("candeploy_lastdownload", (sb.getConfigLong("update.time.download", 0) == 0) ? "0" : "1"); prop.put("candeploy_lastdownload_time", new Date(sb.getConfigLong("update.time.download", 0)).toString()); prop.put("candeploy_lastdeploy", (sb.getConfigLong("update.time.deploy", 0) == 0) ? "0" : "1"); prop.put("candeploy_lastdeploy_time", new Date(sb.getConfigLong("update.time.deploy", 0)).toString()); /* if ((adminaccess) && (yacyVersion.latestRelease >= (thisVersion+0.01))) { // only new Versions(not new SVN) if ((yacyVersion.latestMainRelease != null) || (yacyVersion.latestDevRelease != null)) { prop.put("hintVersionDownload", 1); } else if ((post != null) && (post.containsKey("aquirerelease"))) { yacyVersion.aquireLatestReleaseInfo(); prop.put("hintVersionDownload", 1); } else { prop.put("hintVersionAvailable", 1); } } prop.put("hintVersionAvailable", 1); // for testing prop.putASIS("hintVersionDownload_versionResMain", (yacyVersion.latestMainRelease == null) ? "-" : yacyVersion.latestMainRelease.toAnchor()); prop.putASIS("hintVersionDownload_versionResDev", (yacyVersion.latestDevRelease == null) ? "-" : yacyVersion.latestDevRelease.toAnchor()); prop.put("hintVersionAvailable_latestVersion", Double.toString(yacyVersion.latestRelease)); */ return prop; } }
true
true
public static serverObjects respond(httpHeader header, serverObjects post, serverSwitch<?> env) { // return variable that accumulates replacements final serverObjects prop = new serverObjects(); final plasmaSwitchboard sb = (plasmaSwitchboard) env; prop.put("candeploy_configCommit", "0"); prop.put("candeploy_autoUpdate", "0"); if (post != null) { if (post.containsKey("update")) { prop.put("forwardToSteering", "1"); prop.put("forwardToSteering_release",post.get("releaseinstall", "")); prop.put("deploys", "1"); prop.put("candeploy", "2"); // display nothing else return prop; } if (post.containsKey("downloadRelease")) { // download a release String release = post.get("releasedownload", ""); if (release.length() > 0) { try { yacyVersion.downloadRelease(new yacyVersion(new yacyURL(release, null))); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } } if (post.containsKey("checkRelease")) { yacyVersion.allReleases(true); } if (post.containsKey("deleteRelease")) { String release = post.get("releaseinstall", ""); if(release.length() > 0) { try { new File(sb.releasePath, release).delete(); } catch (NullPointerException e) { sb.getLog().logSevere("AUTO-UPDATE: could not delete release " + release + ": " + e.getMessage()); } } } if (post.containsKey("autoUpdate")) { yacyVersion updateVersion = yacyVersion.rulebasedUpdateInfo(true); if (updateVersion == null) { prop.put("candeploy_autoUpdate", "2"); // no more recent release found } else { // there is a version that is more recent. 
Load it and re-start with it sb.getLog().logInfo("AUTO-UPDATE: downloading more recent release " + updateVersion.url); File downloaded = yacyVersion.downloadRelease(updateVersion); prop.put("candeploy_autoUpdate_downloadedRelease", updateVersion.name); boolean devenvironment = yacyVersion.combined2prettyVersion(sb.getConfig("version","0.1")).startsWith("dev"); if (devenvironment) { sb.getLog().logInfo("AUTO-UPDATE: omiting update because this is a development environment"); prop.put("candeploy_autoUpdate", "3"); } else if ((downloaded == null) || (!downloaded.exists()) || (downloaded.length() == 0)) { sb.getLog().logInfo("AUTO-UPDATE: omiting update because download failed (file cannot be found or is too small)"); prop.put("candeploy_autoUpdate", "4"); } else { yacyVersion.deployRelease(downloaded); sb.terminate(5000); sb.getLog().logInfo("AUTO-UPDATE: deploy and restart initiated"); prop.put("candeploy_autoUpdate", "1"); } } } if (post.containsKey("configSubmit")) { prop.put("candeploy_configCommit", "1"); sb.setConfig("update.process", (post.get("updateMode", "manual").equals("manual")) ? "manual" : "auto"); sb.setConfig("update.cycle", Math.max(12, post.getLong("cycle", 168))); sb.setConfig("update.blacklist", post.get("blacklist", "")); sb.setConfig("update.concept", (post.get("releaseType", "any").equals("any")) ? "any" : "main"); } } // set if this should be visible if (serverSystem.canExecUnix || serverSystem.isWindows) { // we can deploy a new system with (i.e.) 
// cd DATA/RELEASE;tar xfz $1;cp -Rf yacy/* ../../;rm -Rf yacy prop.put("candeploy", "1"); } else { prop.put("candeploy", "0"); } // version information String versionstring = yacyVersion.combined2prettyVersion(sb.getConfig("version","0.1")); prop.put("candeploy_versionpp", versionstring); boolean devenvironment = versionstring.startsWith("dev"); double thisVersion = Double.parseDouble(sb.getConfig("version","0.1")); // cut off the SVN Rev in the Version try {thisVersion = Math.round(thisVersion*1000.0)/1000.0;} catch (NumberFormatException e) {} // list downloaded releases yacyVersion release, dflt; String[] downloaded = sb.releasePath.list(); prop.put("candeploy_deployenabled", (downloaded.length == 0) ? "0" : ((devenvironment) ? "1" : "2")); // prevent that a developer-version is over-deployed TreeSet<yacyVersion> downloadedreleases = new TreeSet<yacyVersion>(); for (int j = 0; j < downloaded.length; j++) { try { release = new yacyVersion(downloaded[j]); downloadedreleases.add(release); } catch (RuntimeException e) { // not a valid release // can be also a restart- or deploy-file File invalid = new File(sb.releasePath, downloaded[j]); if (!invalid.getName().contains(".bat")) // Windows doesn't like deleted scripts invalid.deleteOnExit(); } } dflt = (downloadedreleases.size() == 0) ? null : downloadedreleases.last(); Iterator<yacyVersion> i = downloadedreleases.iterator(); int relcount = 0; while (i.hasNext()) { release = i.next(); prop.put("candeploy_downloadedreleases_" + relcount + "_name", ((release.mainRelease) ? "main" : "dev") + " " + release.releaseNr + "/" + release.svn); prop.put("candeploy_downloadedreleases_" + relcount + "_file", release.name); prop.put("candeploy_downloadedreleases_" + relcount + "_selected", (release == dflt) ? 
"1" : "0"); relcount++; } prop.put("candeploy_downloadedreleases", relcount); // list remotely available releases yacyVersion.DevMain releasess = yacyVersion.allReleases(false); relcount = 0; // main TreeSet<yacyVersion> releases = releasess.main; releases.removeAll(downloadedreleases); i = releases.iterator(); while (i.hasNext()) { release = i.next(); prop.put("candeploy_availreleases_" + relcount + "_name", ((release.mainRelease) ? "main" : "dev") + " " + release.releaseNr + "/" + release.svn); prop.put("candeploy_availreleases_" + relcount + "_url", release.url.toString()); prop.put("candeploy_availreleases_" + relcount + "_selected", "0"); relcount++; } // dev dflt = (releasess.dev.size() == 0) ? null : releasess.dev.last(); releases = releasess.dev; releases.removeAll(downloadedreleases); i = releases.iterator(); while (i.hasNext()) { release = i.next(); prop.put("candeploy_availreleases_" + relcount + "_name", ((release.mainRelease) ? "main" : "dev") + " " + release.releaseNr + "/" + release.svn); prop.put("candeploy_availreleases_" + relcount + "_url", release.url.toString()); prop.put("candeploy_availreleases_" + relcount + "_selected", (release == dflt) ? "1" : "0"); relcount++; } prop.put("candeploy_availreleases", relcount); // properties for automated system update prop.put("candeploy_manualUpdateChecked", (sb.getConfig("update.process", "manual").equals("manual")) ? "1" : "0"); prop.put("candeploy_autoUpdateChecked", (sb.getConfig("update.process", "manual").equals("auto")) ? "1" : "0"); prop.put("candeploy_cycle", sb.getConfigLong("update.cycle", 168)); prop.put("candeploy_blacklist", sb.getConfig("update.blacklist", "")); prop.put("candeploy_releaseTypeMainChecked", (sb.getConfig("update.concept", "any").equals("any")) ? "0" : "1"); prop.put("candeploy_releaseTypeAnyChecked", (sb.getConfig("update.concept", "any").equals("any")) ? "1" : "0"); prop.put("candeploy_lastlookup", (sb.getConfigLong("update.time.lookup", 0) == 0) ? 
"0" : "1"); prop.put("candeploy_lastlookup_time", new Date(sb.getConfigLong("update.time.lookup", 0)).toString()); prop.put("candeploy_lastdownload", (sb.getConfigLong("update.time.download", 0) == 0) ? "0" : "1"); prop.put("candeploy_lastdownload_time", new Date(sb.getConfigLong("update.time.download", 0)).toString()); prop.put("candeploy_lastdeploy", (sb.getConfigLong("update.time.deploy", 0) == 0) ? "0" : "1"); prop.put("candeploy_lastdeploy_time", new Date(sb.getConfigLong("update.time.deploy", 0)).toString()); /* if ((adminaccess) && (yacyVersion.latestRelease >= (thisVersion+0.01))) { // only new Versions(not new SVN) if ((yacyVersion.latestMainRelease != null) || (yacyVersion.latestDevRelease != null)) { prop.put("hintVersionDownload", 1); } else if ((post != null) && (post.containsKey("aquirerelease"))) { yacyVersion.aquireLatestReleaseInfo(); prop.put("hintVersionDownload", 1); } else { prop.put("hintVersionAvailable", 1); } } prop.put("hintVersionAvailable", 1); // for testing prop.putASIS("hintVersionDownload_versionResMain", (yacyVersion.latestMainRelease == null) ? "-" : yacyVersion.latestMainRelease.toAnchor()); prop.putASIS("hintVersionDownload_versionResDev", (yacyVersion.latestDevRelease == null) ? "-" : yacyVersion.latestDevRelease.toAnchor()); prop.put("hintVersionAvailable_latestVersion", Double.toString(yacyVersion.latestRelease)); */ return prop; }
public static serverObjects respond(httpHeader header, serverObjects post, serverSwitch<?> env) { // return variable that accumulates replacements final serverObjects prop = new serverObjects(); final plasmaSwitchboard sb = (plasmaSwitchboard) env; prop.put("candeploy_configCommit", "0"); prop.put("candeploy_autoUpdate", "0"); if (post != null) { if (post.containsKey("update")) { prop.put("forwardToSteering", "1"); prop.put("forwardToSteering_release",post.get("releaseinstall", "")); prop.put("deploys", "1"); prop.put("candeploy", "2"); // display nothing else return prop; } if (post.containsKey("downloadRelease")) { // download a release String release = post.get("releasedownload", ""); if (release.length() > 0) { try { yacyVersion.downloadRelease(new yacyVersion(new yacyURL(release, null))); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } } if (post.containsKey("checkRelease")) { yacyVersion.allReleases(true); } if (post.containsKey("deleteRelease")) { String release = post.get("releaseinstall", ""); if(release.length() > 0) { try { new File(sb.releasePath, release).delete(); } catch (NullPointerException e) { sb.getLog().logSevere("AUTO-UPDATE: could not delete release " + release + ": " + e.getMessage()); } } } if (post.containsKey("autoUpdate")) { yacyVersion updateVersion = yacyVersion.rulebasedUpdateInfo(true); if (updateVersion == null) { prop.put("candeploy_autoUpdate", "2"); // no more recent release found } else { // there is a version that is more recent. 
Load it and re-start with it sb.getLog().logInfo("AUTO-UPDATE: downloading more recent release " + updateVersion.url); File downloaded = yacyVersion.downloadRelease(updateVersion); prop.put("candeploy_autoUpdate_downloadedRelease", updateVersion.name); boolean devenvironment = yacyVersion.combined2prettyVersion(sb.getConfig("version","0.1")).startsWith("dev"); if (devenvironment) { sb.getLog().logInfo("AUTO-UPDATE: omiting update because this is a development environment"); prop.put("candeploy_autoUpdate", "3"); } else if ((downloaded == null) || (!downloaded.exists()) || (downloaded.length() == 0)) { sb.getLog().logInfo("AUTO-UPDATE: omiting update because download failed (file cannot be found or is too small)"); prop.put("candeploy_autoUpdate", "4"); } else { yacyVersion.deployRelease(downloaded); sb.terminate(5000); sb.getLog().logInfo("AUTO-UPDATE: deploy and restart initiated"); prop.put("candeploy_autoUpdate", "1"); } } } if (post.containsKey("configSubmit")) { prop.put("candeploy_configCommit", "1"); sb.setConfig("update.process", (post.get("updateMode", "manual").equals("manual")) ? "manual" : "auto"); sb.setConfig("update.cycle", Math.max(12, post.getLong("cycle", 168))); sb.setConfig("update.blacklist", post.get("blacklist", "")); sb.setConfig("update.concept", (post.get("releaseType", "any").equals("any")) ? "any" : "main"); } } // set if this should be visible if (serverSystem.canExecUnix || serverSystem.isWindows) { // we can deploy a new system with (i.e.) 
// cd DATA/RELEASE;tar xfz $1;cp -Rf yacy/* ../../;rm -Rf yacy prop.put("candeploy", "1"); } else { prop.put("candeploy", "0"); } // version information String versionstring = yacyVersion.combined2prettyVersion(sb.getConfig("version","0.1")); prop.put("candeploy_versionpp", versionstring); boolean devenvironment = versionstring.startsWith("dev"); double thisVersion = Double.parseDouble(sb.getConfig("version","0.1")); // cut off the SVN Rev in the Version try {thisVersion = Math.round(thisVersion*1000.0)/1000.0;} catch (NumberFormatException e) {} // list downloaded releases yacyVersion release, dflt; String[] downloaded = sb.releasePath.list(); prop.put("candeploy_deployenabled", (downloaded.length == 0) ? "0" : ((devenvironment) ? "1" : "2")); // prevent that a developer-version is over-deployed TreeSet<yacyVersion> downloadedreleases = new TreeSet<yacyVersion>(); for (int j = 0; j < downloaded.length; j++) { try { release = new yacyVersion(downloaded[j]); downloadedreleases.add(release); } catch (RuntimeException e) { // not a valid release // can be also a restart- or deploy-file File invalid = new File(sb.releasePath, downloaded[j]); if (!(invalid.getName().endsWith(".bat") || invalid.getName().endsWith(".sh"))) // Windows & Linux don't like deleted scripts while execution! invalid.deleteOnExit(); } } dflt = (downloadedreleases.size() == 0) ? null : downloadedreleases.last(); Iterator<yacyVersion> i = downloadedreleases.iterator(); int relcount = 0; while (i.hasNext()) { release = i.next(); prop.put("candeploy_downloadedreleases_" + relcount + "_name", ((release.mainRelease) ? "main" : "dev") + " " + release.releaseNr + "/" + release.svn); prop.put("candeploy_downloadedreleases_" + relcount + "_file", release.name); prop.put("candeploy_downloadedreleases_" + relcount + "_selected", (release == dflt) ? 
"1" : "0"); relcount++; } prop.put("candeploy_downloadedreleases", relcount); // list remotely available releases yacyVersion.DevMain releasess = yacyVersion.allReleases(false); relcount = 0; // main TreeSet<yacyVersion> releases = releasess.main; releases.removeAll(downloadedreleases); i = releases.iterator(); while (i.hasNext()) { release = i.next(); prop.put("candeploy_availreleases_" + relcount + "_name", ((release.mainRelease) ? "main" : "dev") + " " + release.releaseNr + "/" + release.svn); prop.put("candeploy_availreleases_" + relcount + "_url", release.url.toString()); prop.put("candeploy_availreleases_" + relcount + "_selected", "0"); relcount++; } // dev dflt = (releasess.dev.size() == 0) ? null : releasess.dev.last(); releases = releasess.dev; releases.removeAll(downloadedreleases); i = releases.iterator(); while (i.hasNext()) { release = i.next(); prop.put("candeploy_availreleases_" + relcount + "_name", ((release.mainRelease) ? "main" : "dev") + " " + release.releaseNr + "/" + release.svn); prop.put("candeploy_availreleases_" + relcount + "_url", release.url.toString()); prop.put("candeploy_availreleases_" + relcount + "_selected", (release == dflt) ? "1" : "0"); relcount++; } prop.put("candeploy_availreleases", relcount); // properties for automated system update prop.put("candeploy_manualUpdateChecked", (sb.getConfig("update.process", "manual").equals("manual")) ? "1" : "0"); prop.put("candeploy_autoUpdateChecked", (sb.getConfig("update.process", "manual").equals("auto")) ? "1" : "0"); prop.put("candeploy_cycle", sb.getConfigLong("update.cycle", 168)); prop.put("candeploy_blacklist", sb.getConfig("update.blacklist", "")); prop.put("candeploy_releaseTypeMainChecked", (sb.getConfig("update.concept", "any").equals("any")) ? "0" : "1"); prop.put("candeploy_releaseTypeAnyChecked", (sb.getConfig("update.concept", "any").equals("any")) ? "1" : "0"); prop.put("candeploy_lastlookup", (sb.getConfigLong("update.time.lookup", 0) == 0) ? 
"0" : "1"); prop.put("candeploy_lastlookup_time", new Date(sb.getConfigLong("update.time.lookup", 0)).toString()); prop.put("candeploy_lastdownload", (sb.getConfigLong("update.time.download", 0) == 0) ? "0" : "1"); prop.put("candeploy_lastdownload_time", new Date(sb.getConfigLong("update.time.download", 0)).toString()); prop.put("candeploy_lastdeploy", (sb.getConfigLong("update.time.deploy", 0) == 0) ? "0" : "1"); prop.put("candeploy_lastdeploy_time", new Date(sb.getConfigLong("update.time.deploy", 0)).toString()); /* if ((adminaccess) && (yacyVersion.latestRelease >= (thisVersion+0.01))) { // only new Versions(not new SVN) if ((yacyVersion.latestMainRelease != null) || (yacyVersion.latestDevRelease != null)) { prop.put("hintVersionDownload", 1); } else if ((post != null) && (post.containsKey("aquirerelease"))) { yacyVersion.aquireLatestReleaseInfo(); prop.put("hintVersionDownload", 1); } else { prop.put("hintVersionAvailable", 1); } } prop.put("hintVersionAvailable", 1); // for testing prop.putASIS("hintVersionDownload_versionResMain", (yacyVersion.latestMainRelease == null) ? "-" : yacyVersion.latestMainRelease.toAnchor()); prop.putASIS("hintVersionDownload_versionResDev", (yacyVersion.latestDevRelease == null) ? "-" : yacyVersion.latestDevRelease.toAnchor()); prop.put("hintVersionAvailable_latestVersion", Double.toString(yacyVersion.latestRelease)); */ return prop; }
diff --git a/src/org/opensolaris/opengrok/search/context/Context.java b/src/org/opensolaris/opengrok/search/context/Context.java index dd65bfee..6e3c70f0 100644 --- a/src/org/opensolaris/opengrok/search/context/Context.java +++ b/src/org/opensolaris/opengrok/search/context/Context.java @@ -1,271 +1,271 @@ /* * CDDL HEADER START * * The contents of this file are subject to the terms of the * Common Development and Distribution License (the "License"). * You may not use this file except in compliance with the License. * * See LICENSE.txt included in this distribution for the specific * language governing permissions and limitations under the License. * * When distributing Covered Code, include this CDDL HEADER in each * file and include the License file at LICENSE.txt. * If applicable, add the following below this CDDL HEADER, with the * fields enclosed by brackets "[]" replaced with your own identifying * information: Portions Copyright [yyyy] [name of copyright owner] * * CDDL HEADER END */ /* * Copyright (c) 2005, 2010, Oracle and/or its affiliates. All rights reserved. */ /** * This is supposed to get the matching lines from sourcefile. * since lucene does not easily give the match context. 
*/ package org.opensolaris.opengrok.search.context; import java.io.IOException; import java.io.Reader; import java.io.Writer; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import java.util.logging.Level; import org.apache.lucene.search.Query; import org.opensolaris.opengrok.OpenGrokLogger; import org.opensolaris.opengrok.analysis.Definitions; import org.opensolaris.opengrok.configuration.RuntimeEnvironment; import org.opensolaris.opengrok.search.Hit; import org.opensolaris.opengrok.web.Util; public class Context { private final LineMatcher[] m; static final int MAXFILEREAD = 1024 * 1024; private char[] buffer; PlainLineTokenizer tokens; String queryAsURI; private static Set<String> tokenFields = new HashSet<String>(3); static { tokenFields.add("full"); tokenFields.add("refs"); tokenFields.add("defs"); } /** * Constructs a context generator * @param query the query to generate the result for * @param queryStrings map from field names to queries against the fields */ public Context(Query query, Map<String, String> queryStrings) { QueryMatchers qm = new QueryMatchers(); m = qm.getMatchers(query, tokenFields); if (m != null) { buildQueryAsURI(queryStrings); //System.err.println("Found Matchers = "+ m.length + " for " + query); buffer = new char[MAXFILEREAD]; tokens = new PlainLineTokenizer((Reader) null); } } public boolean isEmpty() { return m == null; } /** * Build the {@code queryAsURI} string that holds the query in a form * that's suitable for sending it as part of a URI. 
* * @param subqueries a map containing the query text for each field */ private void buildQueryAsURI(Map<String, String> subqueries) { boolean first = true; StringBuilder sb = new StringBuilder(); for (Map.Entry<String, String> entry : subqueries.entrySet()) { String field = entry.getKey(); String queryText = entry.getValue(); if (!first) { sb.append('&'); } sb.append(field).append("=").append(Util.URIEncode(queryText)); first = false; } queryAsURI = sb.toString(); } private boolean alt = true; /** * * @param in File to be matched * @param out to write the context * @param morePrefix to link to more... page * @param path path of the file * @param tags format to highlight defs. * @param limit should the number of matching lines be limited? * @return Did it get any matching context? */ public boolean getContext(Reader in, Writer out, String urlPrefix, String morePrefix, String path, Definitions tags, boolean limit, List<Hit> hits) { alt = !alt; if (m == null) { return false; } boolean anything = false; TreeMap<Integer, String[]> matchingTags = null; if (tags != null) { matchingTags = new TreeMap<Integer, String[]>(); try { for (Definitions.Tag tag : tags.getTags()) { for (int i = 0; i < m.length; i++) { if (m[i].match(tag.symbol) == LineMatcher.MATCHED) { /* * desc[1] is line number * desc[2] is type * desc[3] is matching line; */ String[] desc = { tag.symbol, Integer.toString(tag.line), tag.type, tag.text,}; if (in == null) { if (out == null) { Hit hit = new Hit(path, Util.htmlize(desc[3]).replaceAll( desc[0], "<b>" + desc[0] + "</b>"), desc[1], false, alt); hits.add(hit); anything = true; } else { out.write("<a class=\"s\" href=\""); - out.write(urlPrefix); - out.write(path); + out.write(Util.URIEncodePath(urlPrefix)); + out.write(Util.URIEncodePath(path)); out.write("#"); out.write(desc[1]); out.write("\"><span class=\"l\">"); out.write(desc[1]); out.write("</span> "); out.write(Util.htmlize(desc[3]).replaceAll( desc[0], "<b>" + desc[0] + "</b>")); out.write("</a> 
<i> "); out.write(desc[2]); out.write(" </i><br/>"); anything = true; } } else { matchingTags.put(tag.line, desc); } break; } } } } catch (IOException e) { if (hits != null) { // @todo verify why we ignore all exceptions? OpenGrokLogger.getLogger().log(Level.WARNING, "Could not get context for " + path, e); } } } /** * Just to get the matching tag send a null in */ if (in == null) { return anything; } int charsRead = 0; boolean truncated = false; boolean lim = limit; if (!RuntimeEnvironment.getInstance().isQuickContextScan()) { lim = false; } if (lim) { try { charsRead = in.read(buffer); if (charsRead == MAXFILEREAD) { // we probably only read parts of the file, so set the // truncated flag to enable the [all...] link that // requests all matches truncated = true; // truncate to last line read (don't look more than 100 // characters back) for (int i = charsRead - 1; i > charsRead - 100; i--) { if (buffer[i] == '\n') { charsRead = i; break; } } } } catch (IOException e) { OpenGrokLogger.getLogger().log(Level.WARNING, "An error occured while reading data", e); return anything; } if (charsRead == 0) { return anything; } - tokens.reInit(buffer, charsRead, out, urlPrefix + path + "#", matchingTags); + tokens.reInit(buffer, charsRead, out, Util.URIEncodePath(urlPrefix + path) + "#", matchingTags); } else { - tokens.reInit(in, out, urlPrefix + path + "#", matchingTags); + tokens.reInit(in, out, Util.URIEncodePath(urlPrefix + path) + "#", matchingTags); } if (hits != null) { tokens.setAlt(alt); tokens.setHitList(hits); tokens.setFilename(path); } try { String token; int matchState = LineMatcher.NOT_MATCHED; int matchedLines = 0; while ((token = tokens.yylex()) != null && (!lim || matchedLines < 10)) { for (int i = 0; i < m.length; i++) { matchState = m[i].match(token); if (matchState == LineMatcher.MATCHED) { tokens.printContext(); matchedLines++; //out.write("<br> <i>Matched " + token + " maxlines = " + matchedLines + "</i><br>"); break; } else if (matchState == 
LineMatcher.WAIT) { tokens.holdOn(); } else { tokens.neverMind(); } } } anything = matchedLines > 0; tokens.dumpRest(); if (lim && (truncated || matchedLines == 10) && out != null) { - out.write("&nbsp; &nbsp; [<a href=\"" + morePrefix + path + "?" + queryAsURI + "\">all</a>...]"); + out.write("&nbsp; &nbsp; [<a href=\"" + Util.URIEncodePath(morePrefix + path) + "?" + queryAsURI + "\">all</a>...]"); } } catch (IOException e) { OpenGrokLogger.getLogger().log(Level.WARNING, "Could not get context for " + path, e); } finally { if (in != null) { try { in.close(); } catch (IOException e) { OpenGrokLogger.getLogger().log(Level.WARNING, "An error occured while closing stream", e); } } if (out != null) { try { out.flush(); } catch (IOException e) { OpenGrokLogger.getLogger().log(Level.WARNING, "An error occured while flushing stream", e); } } } return anything; } }
false
true
public boolean getContext(Reader in, Writer out, String urlPrefix, String morePrefix, String path, Definitions tags, boolean limit, List<Hit> hits) { alt = !alt; if (m == null) { return false; } boolean anything = false; TreeMap<Integer, String[]> matchingTags = null; if (tags != null) { matchingTags = new TreeMap<Integer, String[]>(); try { for (Definitions.Tag tag : tags.getTags()) { for (int i = 0; i < m.length; i++) { if (m[i].match(tag.symbol) == LineMatcher.MATCHED) { /* * desc[1] is line number * desc[2] is type * desc[3] is matching line; */ String[] desc = { tag.symbol, Integer.toString(tag.line), tag.type, tag.text,}; if (in == null) { if (out == null) { Hit hit = new Hit(path, Util.htmlize(desc[3]).replaceAll( desc[0], "<b>" + desc[0] + "</b>"), desc[1], false, alt); hits.add(hit); anything = true; } else { out.write("<a class=\"s\" href=\""); out.write(urlPrefix); out.write(path); out.write("#"); out.write(desc[1]); out.write("\"><span class=\"l\">"); out.write(desc[1]); out.write("</span> "); out.write(Util.htmlize(desc[3]).replaceAll( desc[0], "<b>" + desc[0] + "</b>")); out.write("</a> <i> "); out.write(desc[2]); out.write(" </i><br/>"); anything = true; } } else { matchingTags.put(tag.line, desc); } break; } } } } catch (IOException e) { if (hits != null) { // @todo verify why we ignore all exceptions? OpenGrokLogger.getLogger().log(Level.WARNING, "Could not get context for " + path, e); } } } /** * Just to get the matching tag send a null in */ if (in == null) { return anything; } int charsRead = 0; boolean truncated = false; boolean lim = limit; if (!RuntimeEnvironment.getInstance().isQuickContextScan()) { lim = false; } if (lim) { try { charsRead = in.read(buffer); if (charsRead == MAXFILEREAD) { // we probably only read parts of the file, so set the // truncated flag to enable the [all...] 
link that // requests all matches truncated = true; // truncate to last line read (don't look more than 100 // characters back) for (int i = charsRead - 1; i > charsRead - 100; i--) { if (buffer[i] == '\n') { charsRead = i; break; } } } } catch (IOException e) { OpenGrokLogger.getLogger().log(Level.WARNING, "An error occured while reading data", e); return anything; } if (charsRead == 0) { return anything; } tokens.reInit(buffer, charsRead, out, urlPrefix + path + "#", matchingTags); } else { tokens.reInit(in, out, urlPrefix + path + "#", matchingTags); } if (hits != null) { tokens.setAlt(alt); tokens.setHitList(hits); tokens.setFilename(path); } try { String token; int matchState = LineMatcher.NOT_MATCHED; int matchedLines = 0; while ((token = tokens.yylex()) != null && (!lim || matchedLines < 10)) { for (int i = 0; i < m.length; i++) { matchState = m[i].match(token); if (matchState == LineMatcher.MATCHED) { tokens.printContext(); matchedLines++; //out.write("<br> <i>Matched " + token + " maxlines = " + matchedLines + "</i><br>"); break; } else if (matchState == LineMatcher.WAIT) { tokens.holdOn(); } else { tokens.neverMind(); } } } anything = matchedLines > 0; tokens.dumpRest(); if (lim && (truncated || matchedLines == 10) && out != null) { out.write("&nbsp; &nbsp; [<a href=\"" + morePrefix + path + "?" + queryAsURI + "\">all</a>...]"); } } catch (IOException e) { OpenGrokLogger.getLogger().log(Level.WARNING, "Could not get context for " + path, e); } finally { if (in != null) { try { in.close(); } catch (IOException e) { OpenGrokLogger.getLogger().log(Level.WARNING, "An error occured while closing stream", e); } } if (out != null) { try { out.flush(); } catch (IOException e) { OpenGrokLogger.getLogger().log(Level.WARNING, "An error occured while flushing stream", e); } } } return anything; }
public boolean getContext(Reader in, Writer out, String urlPrefix, String morePrefix, String path, Definitions tags, boolean limit, List<Hit> hits) { alt = !alt; if (m == null) { return false; } boolean anything = false; TreeMap<Integer, String[]> matchingTags = null; if (tags != null) { matchingTags = new TreeMap<Integer, String[]>(); try { for (Definitions.Tag tag : tags.getTags()) { for (int i = 0; i < m.length; i++) { if (m[i].match(tag.symbol) == LineMatcher.MATCHED) { /* * desc[1] is line number * desc[2] is type * desc[3] is matching line; */ String[] desc = { tag.symbol, Integer.toString(tag.line), tag.type, tag.text,}; if (in == null) { if (out == null) { Hit hit = new Hit(path, Util.htmlize(desc[3]).replaceAll( desc[0], "<b>" + desc[0] + "</b>"), desc[1], false, alt); hits.add(hit); anything = true; } else { out.write("<a class=\"s\" href=\""); out.write(Util.URIEncodePath(urlPrefix)); out.write(Util.URIEncodePath(path)); out.write("#"); out.write(desc[1]); out.write("\"><span class=\"l\">"); out.write(desc[1]); out.write("</span> "); out.write(Util.htmlize(desc[3]).replaceAll( desc[0], "<b>" + desc[0] + "</b>")); out.write("</a> <i> "); out.write(desc[2]); out.write(" </i><br/>"); anything = true; } } else { matchingTags.put(tag.line, desc); } break; } } } } catch (IOException e) { if (hits != null) { // @todo verify why we ignore all exceptions? OpenGrokLogger.getLogger().log(Level.WARNING, "Could not get context for " + path, e); } } } /** * Just to get the matching tag send a null in */ if (in == null) { return anything; } int charsRead = 0; boolean truncated = false; boolean lim = limit; if (!RuntimeEnvironment.getInstance().isQuickContextScan()) { lim = false; } if (lim) { try { charsRead = in.read(buffer); if (charsRead == MAXFILEREAD) { // we probably only read parts of the file, so set the // truncated flag to enable the [all...] 
link that // requests all matches truncated = true; // truncate to last line read (don't look more than 100 // characters back) for (int i = charsRead - 1; i > charsRead - 100; i--) { if (buffer[i] == '\n') { charsRead = i; break; } } } } catch (IOException e) { OpenGrokLogger.getLogger().log(Level.WARNING, "An error occured while reading data", e); return anything; } if (charsRead == 0) { return anything; } tokens.reInit(buffer, charsRead, out, Util.URIEncodePath(urlPrefix + path) + "#", matchingTags); } else { tokens.reInit(in, out, Util.URIEncodePath(urlPrefix + path) + "#", matchingTags); } if (hits != null) { tokens.setAlt(alt); tokens.setHitList(hits); tokens.setFilename(path); } try { String token; int matchState = LineMatcher.NOT_MATCHED; int matchedLines = 0; while ((token = tokens.yylex()) != null && (!lim || matchedLines < 10)) { for (int i = 0; i < m.length; i++) { matchState = m[i].match(token); if (matchState == LineMatcher.MATCHED) { tokens.printContext(); matchedLines++; //out.write("<br> <i>Matched " + token + " maxlines = " + matchedLines + "</i><br>"); break; } else if (matchState == LineMatcher.WAIT) { tokens.holdOn(); } else { tokens.neverMind(); } } } anything = matchedLines > 0; tokens.dumpRest(); if (lim && (truncated || matchedLines == 10) && out != null) { out.write("&nbsp; &nbsp; [<a href=\"" + Util.URIEncodePath(morePrefix + path) + "?" + queryAsURI + "\">all</a>...]"); } } catch (IOException e) { OpenGrokLogger.getLogger().log(Level.WARNING, "Could not get context for " + path, e); } finally { if (in != null) { try { in.close(); } catch (IOException e) { OpenGrokLogger.getLogger().log(Level.WARNING, "An error occured while closing stream", e); } } if (out != null) { try { out.flush(); } catch (IOException e) { OpenGrokLogger.getLogger().log(Level.WARNING, "An error occured while flushing stream", e); } } } return anything; }
diff --git a/src/org/sakaiproject/tool/assessment/ui/listener/evaluation/TotalScoreListener.java b/src/org/sakaiproject/tool/assessment/ui/listener/evaluation/TotalScoreListener.java index 8c0d40deb..44d831c35 100755 --- a/src/org/sakaiproject/tool/assessment/ui/listener/evaluation/TotalScoreListener.java +++ b/src/org/sakaiproject/tool/assessment/ui/listener/evaluation/TotalScoreListener.java @@ -1,367 +1,367 @@ /********************************************************************************** * $URL$ * $Id$ *********************************************************************************** * * Copyright (c) 2004-2005 The Regents of the University of Michigan, Trustees of Indiana University, * Board of Trustees of the Leland Stanford, Jr., University, and The MIT Corporation * * Licensed under the Educational Community License Version 1.0 (the "License"); * By obtaining, using and/or copying this Original Work, you agree that you have read, * understand, and will comply with the terms and conditions of the Educational Community License. * You may obtain a copy of the License at: * * http://cvs.sakaiproject.org/licenses/license_1_0.html * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE * AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
* **********************************************************************************/ package org.sakaiproject.tool.assessment.ui.listener.evaluation; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import javax.faces.context.FacesContext; import javax.faces.event.AbortProcessingException; import javax.faces.event.ActionEvent; import javax.faces.event.ActionListener; import javax.faces.event.ValueChangeEvent; import javax.faces.event.ValueChangeListener; import org.apache.commons.beanutils.BeanUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.sakaiproject.tool.assessment.business.entity.RecordingData; import org.sakaiproject.tool.assessment.data.dao.assessment.AssessmentAccessControl; import org.sakaiproject.tool.assessment.data.dao.assessment.EvaluationModel; import org.sakaiproject.tool.assessment.data.dao.assessment.PublishedAssessmentData; import org.sakaiproject.tool.assessment.data.dao.grading.AssessmentGradingData; import org.sakaiproject.tool.assessment.data.dao.grading.ItemGradingData; import org.sakaiproject.tool.assessment.data.ifc.assessment.ItemDataIfc; import org.sakaiproject.tool.assessment.data.ifc.assessment.SectionDataIfc; import org.sakaiproject.tool.assessment.facade.AgentFacade; import org.sakaiproject.tool.assessment.services.GradingService; import org.sakaiproject.tool.assessment.services.PersistenceService; import org.sakaiproject.tool.assessment.ui.bean.evaluation.AgentResults; import org.sakaiproject.tool.assessment.ui.bean.evaluation.TotalScoresBean; import org.sakaiproject.tool.assessment.ui.listener.evaluation.util.EvaluationListenerUtil; import org.sakaiproject.tool.assessment.ui.listener.util.ContextUtil; import org.sakaiproject.tool.assessment.util.BeanSort; // end testing /** * <p> * This handles the selection of the Total Score entry page. 
* </p> * <p>Description: Action Listener for Evaluation Total Score front door</p> * <p>Copyright: Copyright (c) 2004</p> * <p>Organization: Sakai Project</p> * @author Ed Smiley * @version $Id$ */ public class TotalScoreListener implements ActionListener, ValueChangeListener { private static Log log = LogFactory.getLog(TotalScoreListener.class); private static EvaluationListenerUtil util; private static BeanSort bs; private static ContextUtil cu; /** * Standard process action method. * @param ae ActionEvent * @throws AbortProcessingException */ public void processAction(ActionEvent ae) throws AbortProcessingException { FacesContext context = FacesContext.getCurrentInstance(); Map reqMap = context.getExternalContext().getRequestMap(); Map requestParams = context.getExternalContext(). getRequestParameterMap(); //System.out.println("requestParams: " + requestParams); //System.out.println("reqMap: " + reqMap); log.info("TotalScore LISTENER."); TotalScoresBean bean = (TotalScoresBean) cu.lookupBean("totalScores"); // we probably want to change the poster to be consistent String publishedId = cu.lookupParam("publishedId"); //System.out.println("Got publishedId " + publishedId); log.info("Calling totalScores."); if (!totalScores(publishedId, bean, false)) { //throw new RuntimeException("failed to call totalScores."); } } /** * Process a value change. */ public void processValueChange(ValueChangeEvent event) { FacesContext context = FacesContext.getCurrentInstance(); Map reqMap = context.getExternalContext().getRequestMap(); Map requestParams = context.getExternalContext(). 
getRequestParameterMap(); //System.out.println("requestParams: " + requestParams); //System.out.println("reqMap: " + reqMap); log.info("TotalScore CHANGE LISTENER."); TotalScoresBean bean = (TotalScoresBean) cu.lookupBean("totalScores"); // we probably want to change the poster to be consistent String publishedId = cu.lookupParam("publishedId"); //System.out.println("Got publishedId " + publishedId); log.info("Calling totalScores."); if (!totalScores(publishedId, bean, true)) { //throw new RuntimeException("failed to call totalScores."); } } /** * This will populate the TotalScoresBean with the data associated with the * particular versioned assessment based on the publishedId. * * @todo Some of this code will change when we move this to Hibernate persistence. * @param publishedId String * @param bean TotalScoresBean * @return boolean */ public boolean totalScores( String publishedId, TotalScoresBean bean, boolean isValueChange) { log.debug("totalScores()"); try { GradingService delegate = new GradingService(); if (cu.lookupParam("sortBy") != null && !cu.lookupParam("sortBy").trim().equals("")) bean.setSortType(cu.lookupParam("sortBy")); String which = cu.lookupParam("allSubmissions"); //System.out.println("Rachel: allSubmissions = " + which); if (which == null) which = "false"; bean.setAllSubmissions(which); bean.setPublishedId(publishedId); ArrayList scores = delegate.getTotalScores(publishedId, which); Iterator iter = scores.iterator(); //System.out.println("Has this many agents: " + scores.size()); if (!iter.hasNext()) return false; Object next = iter.next(); Date dueDate = null; // Okay, here we get the first result set, which has a summary of // information and a pointer to the graded assessment we should be // displaying. We get the graded assessment. 
AssessmentGradingData data = (AssessmentGradingData) next; if (data.getPublishedAssessment() != null) { bean.setAssessmentName(data.getPublishedAssessment().getTitle()); // if section set is null, initialize it - daisyf , 01/31/05 PublishedAssessmentData pub = (PublishedAssessmentData)data.getPublishedAssessment(); HashSet sectionSet = PersistenceService.getInstance(). getPublishedAssessmentFacadeQueries().getSectionSetForAssessment(pub); data.getPublishedAssessment().setSectionSet(sectionSet); // Set first item for question scores. This can be complicated. // It's important because it simplifies Question Scores to do this // once and keep track of it -- the data is available here, and // not there. If firstItem is "", there are no items with // answers, and the QuestionScores and Histograms pages don't // show. This is a very weird case, but has to be handled. String firstitem = ""; HashMap answeredItems = new HashMap(); Iterator i2 = scores.iterator(); while (i2.hasNext()) { AssessmentGradingData agd = (AssessmentGradingData) i2.next(); Iterator i3 = agd.getItemGradingSet().iterator(); while (i3.hasNext()) { ItemGradingData igd = (ItemGradingData) i3.next(); answeredItems.put(igd.getPublishedItem().getItemId(), "true"); } } bean.setAnsweredItems(answeredItems); // Save for QuestionScores boolean foundid = false; i2 = data.getPublishedAssessment().getSectionArraySorted().iterator(); while (i2.hasNext() && !foundid) { SectionDataIfc sdata = (SectionDataIfc) i2.next(); Iterator i3 = sdata.getItemArraySortedForGrading().iterator(); while (i3.hasNext() && !foundid) { ItemDataIfc idata = (ItemDataIfc) i3.next(); if (answeredItems.get(idata.getItemId()) != null) { bean.setFirstItem(idata.getItemId().toString()); foundid = true; } } } //System.out.println("Rachel: Setting first item to " + // bean.getFirstItem()); try { bean.setAnonymous((data.getPublishedAssessment().getEvaluationModel().getAnonymousGrading().equals(EvaluationModel.ANONYMOUS_GRADING)?"true":"false")); 
//System.out.println("Set anonymous = " + bean.getAnonymous()); } catch (Exception e) { //System.out.println("No evaluation model"); bean.setAnonymous("false"); } try { bean.setLateHandling(data.getPublishedAssessment().getAssessmentAccessControl().getLateHandling().toString()); } catch (Exception e) { //System.out.println("No access control model."); bean.setLateHandling(AssessmentAccessControl.NOT_ACCEPT_LATE_SUBMISSION.toString()); } try { bean.setDueDate(data.getPublishedAssessment().getAssessmentAccessControl().getDueDate().toString()); dueDate = data.getPublishedAssessment().getAssessmentAccessControl().getDueDate(); } catch (Exception e) { //System.out.println("No due date."); bean.setDueDate(""); dueDate = null; } try { bean.setMaxScore(data.getPublishedAssessment().getEvaluationModel().getFixedTotalScore().toString()); } catch (Exception e) { float score = (float) 0.0; Iterator iter2 = data.getPublishedAssessment().getSectionArraySorted().iterator(); while (iter2.hasNext()) { SectionDataIfc sdata = (SectionDataIfc) iter2.next(); Iterator iter3 = sdata.getItemArraySortedForGrading().iterator(); while (iter3.hasNext()) { ItemDataIfc idata = (ItemDataIfc) iter3.next(); score += idata.getScore().floatValue(); } } bean.setMaxScore(new Float(score).toString()); } } if (cu.lookupParam("roleSelection") != null) { bean.setRoleSelection(cu.lookupParam("roleSelection")); } if (bean.getSortType() == null) { if (bean.getAnonymous().equals("true")) { bean.setSortType("totalAutoScore"); } else { bean.setSortType("lastName"); } } // recordingData encapsulates the inbeanation needed for recording. 
// set recording agent, agent assessmentId, // set course_assignment_context value // set max tries (0=unlimited), and 30 seconds max length String courseContext = bean.getAssessmentName() + " total "; // Note this is HTTP-centric right now, we can't use in Faces // AuthoringHelper authoringHelper = new AuthoringHelper(); // authoringHelper.getRemoteUserID() needs servlet stuff // authoringHelper.getRemoteUserName() needs servlet stuff String userId = ""; String userName = ""; RecordingData recordingData = new RecordingData( userId, userName, courseContext, "0", "30"); // set this value in the requestMap for sound recorder bean.setRecordingData(recordingData); /* Dump the grading and agent information into AgentResults */ ArrayList agents = new ArrayList(); iter = scores.iterator(); while (iter.hasNext()) { AgentResults results = new AgentResults(); AssessmentGradingData gdata = (AssessmentGradingData) iter.next(); BeanUtils.copyProperties(results, gdata); results.setAssessmentGradingId(gdata.getAssessmentGradingId()); results.setTotalAutoScore(gdata.getTotalAutoScore().toString()); results.setTotalOverrideScore(gdata.getTotalOverrideScore().toString()); results.setFinalScore(gdata.getFinalScore().toString()); results.setComments(gdata.getComments()); if (dueDate == null || gdata.getSubmittedDate().before(dueDate)) results.setIsLate(new Boolean(false)); else results.setIsLate(new Boolean(true)); AgentFacade agent = new AgentFacade(gdata.getAgentId()); //System.out.println("Rachel: agentid = " + gdata.getAgentId()); results.setLastName(agent.getLastName()); results.setFirstName(agent.getFirstName()); if (results.getLastName() != null && results.getLastName().length() > 0) results.setLastInitial(results.getLastName().substring(0,1)); else if (results.getFirstName() != null && results.getFirstName().length() > 0) results.setLastInitial(results.getFirstName().substring(0,1)); else - results.setLastInitial("A"); + results.setLastInitial("Anonymous"); 
results.setIdString(agent.getIdString()); results.setRole(agent.getRole()); agents.add(results); } //System.out.println("Sort type is " + bean.getSortType() + "."); bs = new BeanSort(agents, bean.getSortType()); if ( (bean.getSortType()).equals("assessmentGradingId") || (bean.getSortType()).equals("totalAutoScore") || (bean.getSortType()).equals("totalOverrideScore") || (bean.getSortType()).equals("finalScore")) { bs.toNumericSort(); } else { bs.toStringSort(); } bs.sort(); //System.out.println("Listing agents."); bean.setAgents(agents); bean.setTotalPeople(new Integer(bean.getAgents().size()).toString()); } catch (Exception e) { e.printStackTrace(); return false; } return true; } }
true
true
public boolean totalScores( String publishedId, TotalScoresBean bean, boolean isValueChange) { log.debug("totalScores()"); try { GradingService delegate = new GradingService(); if (cu.lookupParam("sortBy") != null && !cu.lookupParam("sortBy").trim().equals("")) bean.setSortType(cu.lookupParam("sortBy")); String which = cu.lookupParam("allSubmissions"); //System.out.println("Rachel: allSubmissions = " + which); if (which == null) which = "false"; bean.setAllSubmissions(which); bean.setPublishedId(publishedId); ArrayList scores = delegate.getTotalScores(publishedId, which); Iterator iter = scores.iterator(); //System.out.println("Has this many agents: " + scores.size()); if (!iter.hasNext()) return false; Object next = iter.next(); Date dueDate = null; // Okay, here we get the first result set, which has a summary of // information and a pointer to the graded assessment we should be // displaying. We get the graded assessment. AssessmentGradingData data = (AssessmentGradingData) next; if (data.getPublishedAssessment() != null) { bean.setAssessmentName(data.getPublishedAssessment().getTitle()); // if section set is null, initialize it - daisyf , 01/31/05 PublishedAssessmentData pub = (PublishedAssessmentData)data.getPublishedAssessment(); HashSet sectionSet = PersistenceService.getInstance(). getPublishedAssessmentFacadeQueries().getSectionSetForAssessment(pub); data.getPublishedAssessment().setSectionSet(sectionSet); // Set first item for question scores. This can be complicated. // It's important because it simplifies Question Scores to do this // once and keep track of it -- the data is available here, and // not there. If firstItem is "", there are no items with // answers, and the QuestionScores and Histograms pages don't // show. This is a very weird case, but has to be handled. 
String firstitem = ""; HashMap answeredItems = new HashMap(); Iterator i2 = scores.iterator(); while (i2.hasNext()) { AssessmentGradingData agd = (AssessmentGradingData) i2.next(); Iterator i3 = agd.getItemGradingSet().iterator(); while (i3.hasNext()) { ItemGradingData igd = (ItemGradingData) i3.next(); answeredItems.put(igd.getPublishedItem().getItemId(), "true"); } } bean.setAnsweredItems(answeredItems); // Save for QuestionScores boolean foundid = false; i2 = data.getPublishedAssessment().getSectionArraySorted().iterator(); while (i2.hasNext() && !foundid) { SectionDataIfc sdata = (SectionDataIfc) i2.next(); Iterator i3 = sdata.getItemArraySortedForGrading().iterator(); while (i3.hasNext() && !foundid) { ItemDataIfc idata = (ItemDataIfc) i3.next(); if (answeredItems.get(idata.getItemId()) != null) { bean.setFirstItem(idata.getItemId().toString()); foundid = true; } } } //System.out.println("Rachel: Setting first item to " + // bean.getFirstItem()); try { bean.setAnonymous((data.getPublishedAssessment().getEvaluationModel().getAnonymousGrading().equals(EvaluationModel.ANONYMOUS_GRADING)?"true":"false")); //System.out.println("Set anonymous = " + bean.getAnonymous()); } catch (Exception e) { //System.out.println("No evaluation model"); bean.setAnonymous("false"); } try { bean.setLateHandling(data.getPublishedAssessment().getAssessmentAccessControl().getLateHandling().toString()); } catch (Exception e) { //System.out.println("No access control model."); bean.setLateHandling(AssessmentAccessControl.NOT_ACCEPT_LATE_SUBMISSION.toString()); } try { bean.setDueDate(data.getPublishedAssessment().getAssessmentAccessControl().getDueDate().toString()); dueDate = data.getPublishedAssessment().getAssessmentAccessControl().getDueDate(); } catch (Exception e) { //System.out.println("No due date."); bean.setDueDate(""); dueDate = null; } try { bean.setMaxScore(data.getPublishedAssessment().getEvaluationModel().getFixedTotalScore().toString()); } catch (Exception e) { float score 
= (float) 0.0; Iterator iter2 = data.getPublishedAssessment().getSectionArraySorted().iterator(); while (iter2.hasNext()) { SectionDataIfc sdata = (SectionDataIfc) iter2.next(); Iterator iter3 = sdata.getItemArraySortedForGrading().iterator(); while (iter3.hasNext()) { ItemDataIfc idata = (ItemDataIfc) iter3.next(); score += idata.getScore().floatValue(); } } bean.setMaxScore(new Float(score).toString()); } } if (cu.lookupParam("roleSelection") != null) { bean.setRoleSelection(cu.lookupParam("roleSelection")); } if (bean.getSortType() == null) { if (bean.getAnonymous().equals("true")) { bean.setSortType("totalAutoScore"); } else { bean.setSortType("lastName"); } } // recordingData encapsulates the inbeanation needed for recording. // set recording agent, agent assessmentId, // set course_assignment_context value // set max tries (0=unlimited), and 30 seconds max length String courseContext = bean.getAssessmentName() + " total "; // Note this is HTTP-centric right now, we can't use in Faces // AuthoringHelper authoringHelper = new AuthoringHelper(); // authoringHelper.getRemoteUserID() needs servlet stuff // authoringHelper.getRemoteUserName() needs servlet stuff String userId = ""; String userName = ""; RecordingData recordingData = new RecordingData( userId, userName, courseContext, "0", "30"); // set this value in the requestMap for sound recorder bean.setRecordingData(recordingData); /* Dump the grading and agent information into AgentResults */ ArrayList agents = new ArrayList(); iter = scores.iterator(); while (iter.hasNext()) { AgentResults results = new AgentResults(); AssessmentGradingData gdata = (AssessmentGradingData) iter.next(); BeanUtils.copyProperties(results, gdata); results.setAssessmentGradingId(gdata.getAssessmentGradingId()); results.setTotalAutoScore(gdata.getTotalAutoScore().toString()); results.setTotalOverrideScore(gdata.getTotalOverrideScore().toString()); results.setFinalScore(gdata.getFinalScore().toString()); 
results.setComments(gdata.getComments()); if (dueDate == null || gdata.getSubmittedDate().before(dueDate)) results.setIsLate(new Boolean(false)); else results.setIsLate(new Boolean(true)); AgentFacade agent = new AgentFacade(gdata.getAgentId()); //System.out.println("Rachel: agentid = " + gdata.getAgentId()); results.setLastName(agent.getLastName()); results.setFirstName(agent.getFirstName()); if (results.getLastName() != null && results.getLastName().length() > 0) results.setLastInitial(results.getLastName().substring(0,1)); else if (results.getFirstName() != null && results.getFirstName().length() > 0) results.setLastInitial(results.getFirstName().substring(0,1)); else results.setLastInitial("A"); results.setIdString(agent.getIdString()); results.setRole(agent.getRole()); agents.add(results); } //System.out.println("Sort type is " + bean.getSortType() + "."); bs = new BeanSort(agents, bean.getSortType()); if ( (bean.getSortType()).equals("assessmentGradingId") || (bean.getSortType()).equals("totalAutoScore") || (bean.getSortType()).equals("totalOverrideScore") || (bean.getSortType()).equals("finalScore")) { bs.toNumericSort(); } else { bs.toStringSort(); } bs.sort(); //System.out.println("Listing agents."); bean.setAgents(agents); bean.setTotalPeople(new Integer(bean.getAgents().size()).toString()); } catch (Exception e) { e.printStackTrace(); return false; } return true; }
public boolean totalScores( String publishedId, TotalScoresBean bean, boolean isValueChange) { log.debug("totalScores()"); try { GradingService delegate = new GradingService(); if (cu.lookupParam("sortBy") != null && !cu.lookupParam("sortBy").trim().equals("")) bean.setSortType(cu.lookupParam("sortBy")); String which = cu.lookupParam("allSubmissions"); //System.out.println("Rachel: allSubmissions = " + which); if (which == null) which = "false"; bean.setAllSubmissions(which); bean.setPublishedId(publishedId); ArrayList scores = delegate.getTotalScores(publishedId, which); Iterator iter = scores.iterator(); //System.out.println("Has this many agents: " + scores.size()); if (!iter.hasNext()) return false; Object next = iter.next(); Date dueDate = null; // Okay, here we get the first result set, which has a summary of // information and a pointer to the graded assessment we should be // displaying. We get the graded assessment. AssessmentGradingData data = (AssessmentGradingData) next; if (data.getPublishedAssessment() != null) { bean.setAssessmentName(data.getPublishedAssessment().getTitle()); // if section set is null, initialize it - daisyf , 01/31/05 PublishedAssessmentData pub = (PublishedAssessmentData)data.getPublishedAssessment(); HashSet sectionSet = PersistenceService.getInstance(). getPublishedAssessmentFacadeQueries().getSectionSetForAssessment(pub); data.getPublishedAssessment().setSectionSet(sectionSet); // Set first item for question scores. This can be complicated. // It's important because it simplifies Question Scores to do this // once and keep track of it -- the data is available here, and // not there. If firstItem is "", there are no items with // answers, and the QuestionScores and Histograms pages don't // show. This is a very weird case, but has to be handled. 
String firstitem = ""; HashMap answeredItems = new HashMap(); Iterator i2 = scores.iterator(); while (i2.hasNext()) { AssessmentGradingData agd = (AssessmentGradingData) i2.next(); Iterator i3 = agd.getItemGradingSet().iterator(); while (i3.hasNext()) { ItemGradingData igd = (ItemGradingData) i3.next(); answeredItems.put(igd.getPublishedItem().getItemId(), "true"); } } bean.setAnsweredItems(answeredItems); // Save for QuestionScores boolean foundid = false; i2 = data.getPublishedAssessment().getSectionArraySorted().iterator(); while (i2.hasNext() && !foundid) { SectionDataIfc sdata = (SectionDataIfc) i2.next(); Iterator i3 = sdata.getItemArraySortedForGrading().iterator(); while (i3.hasNext() && !foundid) { ItemDataIfc idata = (ItemDataIfc) i3.next(); if (answeredItems.get(idata.getItemId()) != null) { bean.setFirstItem(idata.getItemId().toString()); foundid = true; } } } //System.out.println("Rachel: Setting first item to " + // bean.getFirstItem()); try { bean.setAnonymous((data.getPublishedAssessment().getEvaluationModel().getAnonymousGrading().equals(EvaluationModel.ANONYMOUS_GRADING)?"true":"false")); //System.out.println("Set anonymous = " + bean.getAnonymous()); } catch (Exception e) { //System.out.println("No evaluation model"); bean.setAnonymous("false"); } try { bean.setLateHandling(data.getPublishedAssessment().getAssessmentAccessControl().getLateHandling().toString()); } catch (Exception e) { //System.out.println("No access control model."); bean.setLateHandling(AssessmentAccessControl.NOT_ACCEPT_LATE_SUBMISSION.toString()); } try { bean.setDueDate(data.getPublishedAssessment().getAssessmentAccessControl().getDueDate().toString()); dueDate = data.getPublishedAssessment().getAssessmentAccessControl().getDueDate(); } catch (Exception e) { //System.out.println("No due date."); bean.setDueDate(""); dueDate = null; } try { bean.setMaxScore(data.getPublishedAssessment().getEvaluationModel().getFixedTotalScore().toString()); } catch (Exception e) { float score 
= (float) 0.0; Iterator iter2 = data.getPublishedAssessment().getSectionArraySorted().iterator(); while (iter2.hasNext()) { SectionDataIfc sdata = (SectionDataIfc) iter2.next(); Iterator iter3 = sdata.getItemArraySortedForGrading().iterator(); while (iter3.hasNext()) { ItemDataIfc idata = (ItemDataIfc) iter3.next(); score += idata.getScore().floatValue(); } } bean.setMaxScore(new Float(score).toString()); } } if (cu.lookupParam("roleSelection") != null) { bean.setRoleSelection(cu.lookupParam("roleSelection")); } if (bean.getSortType() == null) { if (bean.getAnonymous().equals("true")) { bean.setSortType("totalAutoScore"); } else { bean.setSortType("lastName"); } } // recordingData encapsulates the inbeanation needed for recording. // set recording agent, agent assessmentId, // set course_assignment_context value // set max tries (0=unlimited), and 30 seconds max length String courseContext = bean.getAssessmentName() + " total "; // Note this is HTTP-centric right now, we can't use in Faces // AuthoringHelper authoringHelper = new AuthoringHelper(); // authoringHelper.getRemoteUserID() needs servlet stuff // authoringHelper.getRemoteUserName() needs servlet stuff String userId = ""; String userName = ""; RecordingData recordingData = new RecordingData( userId, userName, courseContext, "0", "30"); // set this value in the requestMap for sound recorder bean.setRecordingData(recordingData); /* Dump the grading and agent information into AgentResults */ ArrayList agents = new ArrayList(); iter = scores.iterator(); while (iter.hasNext()) { AgentResults results = new AgentResults(); AssessmentGradingData gdata = (AssessmentGradingData) iter.next(); BeanUtils.copyProperties(results, gdata); results.setAssessmentGradingId(gdata.getAssessmentGradingId()); results.setTotalAutoScore(gdata.getTotalAutoScore().toString()); results.setTotalOverrideScore(gdata.getTotalOverrideScore().toString()); results.setFinalScore(gdata.getFinalScore().toString()); 
results.setComments(gdata.getComments()); if (dueDate == null || gdata.getSubmittedDate().before(dueDate)) results.setIsLate(new Boolean(false)); else results.setIsLate(new Boolean(true)); AgentFacade agent = new AgentFacade(gdata.getAgentId()); //System.out.println("Rachel: agentid = " + gdata.getAgentId()); results.setLastName(agent.getLastName()); results.setFirstName(agent.getFirstName()); if (results.getLastName() != null && results.getLastName().length() > 0) results.setLastInitial(results.getLastName().substring(0,1)); else if (results.getFirstName() != null && results.getFirstName().length() > 0) results.setLastInitial(results.getFirstName().substring(0,1)); else results.setLastInitial("Anonymous"); results.setIdString(agent.getIdString()); results.setRole(agent.getRole()); agents.add(results); } //System.out.println("Sort type is " + bean.getSortType() + "."); bs = new BeanSort(agents, bean.getSortType()); if ( (bean.getSortType()).equals("assessmentGradingId") || (bean.getSortType()).equals("totalAutoScore") || (bean.getSortType()).equals("totalOverrideScore") || (bean.getSortType()).equals("finalScore")) { bs.toNumericSort(); } else { bs.toStringSort(); } bs.sort(); //System.out.println("Listing agents."); bean.setAgents(agents); bean.setTotalPeople(new Integer(bean.getAgents().size()).toString()); } catch (Exception e) { e.printStackTrace(); return false; } return true; }
diff --git a/pset1/GraphGenerator.java b/pset1/GraphGenerator.java index fd49038..514f444 100644 --- a/pset1/GraphGenerator.java +++ b/pset1/GraphGenerator.java @@ -1,59 +1,67 @@ package pset1; import org.apache.bcel.Repository; import org.apache.bcel.classfile.JavaClass; import org.apache.bcel.classfile.Method; import org.apache.bcel.generic.ClassGen; import org.apache.bcel.generic.ConstantPoolGen; import org.apache.bcel.generic.INVOKEVIRTUAL; import org.apache.bcel.generic.Instruction; import org.apache.bcel.generic.InstructionHandle; import org.apache.bcel.generic.InstructionList; import org.apache.bcel.generic.InstructionTargeter; import org.apache.bcel.generic.MethodGen; import org.apache.bcel.generic.BranchInstruction; public class GraphGenerator { CFG createCFG(String className) throws ClassNotFoundException { CFG cfg = new CFG(); JavaClass jc = Repository.lookupClass(className); ClassGen cg = new ClassGen(jc); ConstantPoolGen cpg = cg.getConstantPool(); for (Method m: cg.getMethods()){ MethodGen mg = new MethodGen(m, cg.getClassName(), cpg); InstructionList il = mg.getInstructionList(); InstructionHandle[] handles = il.getInstructionHandles(); for(InstructionHandle ih: handles){ int position = ih.getPosition(); cfg.addNode(position, m, jc); Instruction instr = ih.getInstruction(); // adding edges to instructions/nodes // ignore branching instructions (jsr[_w], *switch) InstructionHandle instr_next = ih.getNext(); // if statement BranchInstruction bi = null; - if (instr.toString().indexOf("if") != -1){ +// if (instr.toString().indexOf("goto") != -1){ +// bi = (BranchInstruction) instr; +// InstructionHandle if_ih = bi.getTarget(); +// int imm_pos = if_ih.getPosition(); +// int a = if_ih.getPosition(); +// cfg.addEdge(position, m, jc, if_ih.getPosition(), m, jc); +// } + if (instr instanceof BranchInstruction){ bi = (BranchInstruction) instr; InstructionHandle if_ih = bi.getTarget(); - cfg.addEdge(position, m, jc, if_ih.getPosition(), m, jc); +; int npos = 
if_ih.getPosition(); + cfg.addEdge(position, m, jc, npos, m, jc); } - if (instr_next != null){ + if ((instr_next != null) && (instr.toString().indexOf("goto") == -1)){ int nextPosition = instr_next.getPosition(); cfg.addEdge(position, m, jc, nextPosition, m, jc); } } } return cfg; } public static void main(String[] a ) throws ClassNotFoundException { CFG cfg = new GraphGenerator().createCFG("pset1.Test2"); cfg.printDB(); } }
false
true
CFG createCFG(String className) throws ClassNotFoundException { CFG cfg = new CFG(); JavaClass jc = Repository.lookupClass(className); ClassGen cg = new ClassGen(jc); ConstantPoolGen cpg = cg.getConstantPool(); for (Method m: cg.getMethods()){ MethodGen mg = new MethodGen(m, cg.getClassName(), cpg); InstructionList il = mg.getInstructionList(); InstructionHandle[] handles = il.getInstructionHandles(); for(InstructionHandle ih: handles){ int position = ih.getPosition(); cfg.addNode(position, m, jc); Instruction instr = ih.getInstruction(); // adding edges to instructions/nodes // ignore branching instructions (jsr[_w], *switch) InstructionHandle instr_next = ih.getNext(); // if statement BranchInstruction bi = null; if (instr.toString().indexOf("if") != -1){ bi = (BranchInstruction) instr; InstructionHandle if_ih = bi.getTarget(); cfg.addEdge(position, m, jc, if_ih.getPosition(), m, jc); } if (instr_next != null){ int nextPosition = instr_next.getPosition(); cfg.addEdge(position, m, jc, nextPosition, m, jc); } } } return cfg; }
CFG createCFG(String className) throws ClassNotFoundException { CFG cfg = new CFG(); JavaClass jc = Repository.lookupClass(className); ClassGen cg = new ClassGen(jc); ConstantPoolGen cpg = cg.getConstantPool(); for (Method m: cg.getMethods()){ MethodGen mg = new MethodGen(m, cg.getClassName(), cpg); InstructionList il = mg.getInstructionList(); InstructionHandle[] handles = il.getInstructionHandles(); for(InstructionHandle ih: handles){ int position = ih.getPosition(); cfg.addNode(position, m, jc); Instruction instr = ih.getInstruction(); // adding edges to instructions/nodes // ignore branching instructions (jsr[_w], *switch) InstructionHandle instr_next = ih.getNext(); // if statement BranchInstruction bi = null; // if (instr.toString().indexOf("goto") != -1){ // bi = (BranchInstruction) instr; // InstructionHandle if_ih = bi.getTarget(); // int imm_pos = if_ih.getPosition(); // int a = if_ih.getPosition(); // cfg.addEdge(position, m, jc, if_ih.getPosition(), m, jc); // } if (instr instanceof BranchInstruction){ bi = (BranchInstruction) instr; InstructionHandle if_ih = bi.getTarget(); ; int npos = if_ih.getPosition(); cfg.addEdge(position, m, jc, npos, m, jc); } if ((instr_next != null) && (instr.toString().indexOf("goto") == -1)){ int nextPosition = instr_next.getPosition(); cfg.addEdge(position, m, jc, nextPosition, m, jc); } } } return cfg; }
diff --git a/src/de/bdh/ks/KSLang.java b/src/de/bdh/ks/KSLang.java index 9e08168..c363e26 100644 --- a/src/de/bdh/ks/KSLang.java +++ b/src/de/bdh/ks/KSLang.java @@ -1,168 +1,168 @@ package de.bdh.ks; import java.util.HashMap; import org.bukkit.ChatColor; import org.bukkit.command.CommandSender; import org.bukkit.entity.Player; public class KSLang { HashMap<String,HashMap<String,String>> lng = new HashMap<String,HashMap<String,String>>();; String lang = ""; public KSLang() { HashMap<String,String> en = new HashMap<String,String>(); HashMap<String,String> de = new HashMap<String,String>(); en.put("usage", "USAGE: /auction SELL/BUY/REQUEST/DETAIL/LIST/SIGN/LISTREQUESTS/COLLECT/ABORT/ABORTREQUEST/OVERVIEW/OVERVIEWREQUEST"); en.put("usage_abort","USAGE: /auction abort ID - you can get the id by using list"); en.put("usage_request", "USAGE: /auction request (Item) MAXPRICE AMOUNT"); en.put("usage_buy","USAGE: /auction buy (Item (MAXPRICE)) AMOUNT"); en.put("usage_detail", "USAGE: /auction detail Item"); en.put("usage_sign", "USAGE: /auction sign REQUEST/OFFER ID - you can get the id by using list/listrequest"); en.put("usage_overview", "USAGE: /auction overview PAGE"); en.put("usage_sell","USAGE: /auction sell Item PRICEPERBLOCK (AMOUNT) OR /auction sell PRICE_EACH for Item in Hand"); en.put("usage_abortrequest","USAGE: /auction abortrequest ID - you can get the id by using list"); en.put("err_num","$1 must be numeric"); en.put("rem_success","Your auction has been cancelled. You can pick it up at the auction house"); en.put("err_invalid_id", "This ID was invalid or you dont have the permissions to do that"); en.put("rem_rec_suc","Your request has been cancelled. You can pick it up at the auction house"); en.put("err_nosale","You don't have items for sale"); en.put("noqsell","Unable to quicksell item - no default price"); en.put("header_list","You've $1 $2. 
Page: $3 of $4"); en.put("err_noreq","You don't have items requested"); en.put("err_noperm","You're not allowed to do this"); en.put("err_to_ah","You've to go to an auction house to do this"); en.put("err_toohigh","$1 is too high"); en.put("err_nodeliver", "There is nothing for delivery"); en.put("err_block_404","Item not found"); en.put("err_block","Item is invalid"); en.put("err_full_inv", "Your inventory is full"); en.put("err_notrade", "Cannot be traded"); en.put("err","Something went wrong"); en.put("err_nomoney","You don't have enough money"); en.put("err_nooffer","There is no offer which fulfills your options"); en.put("err_nomoney_fee", "You cannot afford the fee of $1 $a"); en.put("suc_offer", "Success. You're offering $1 Items for $2 $a"); en.put("suc_fee_paid","You've paid an auction-fee of $1 $a"); en.put("suc_bought","You've bought the amount you wanted"); en.put("suc_bought_part","You've bought $1 of $2"); en.put("suc_req", "You've requested $1 items for $2 $a"); en.put("suc_rec_item","You've received $1 items"); en.put("suc_rec_money","You've received $1 $a"); en.put("suc_req_part","You've only enough money for $1 items for $2 $a"); en.put("suc_sign","Now just destroy the sign you want to use"); en.put("info", "Auction details about $1"); en.put("goto_ah", "You can collect some items in the auction house"); en.put("collect", "You can collect some items by entering /auction collect"); en.put("amount_sale", "Amount for sale: $1"); en.put("default_price", "Suggested retail price: $1"); en.put("offer","Offer: $1 for $2 $a each"); en.put("request","Request: $1 for $2 $a each"); en.put("err_noitem","You dont own this item"); en.put("welcome","Welcome to KrimSale - worldofminecraft.de"); en.put("suc_sign_com","Success. The sign has been created"); en.put("req_info","Your request is valid for 14 days. 
If noone offers this item for your price, you'll get your money back"); de.put("usage", "NUTZUNG: /auction SELL/BUY/REQUEST/DETAIL/LIST/SIGN/LISTREQUESTS/COLLECT/ABORT/ABORTREQUEST/OVERVIEW/OVERVIEWREQUEST"); de.put("usage_abort","NUTZUNG: /auction abort ID - die ID erhaelst du mittels List"); de.put("usage_request", "NUTZUNG: /auction request (Item) MAXPREIS MENGE"); de.put("usage_buy","NUTZUNG: /auction buy (Item (MAXPREIS)) MENGE"); de.put("usage_detail", "NUTZUNG: /auction detail Itemname"); de.put("usage_sign", "NUTZUNG: /auction sign REQUEST/OFFER ID - die ID erhaelst du mittels List/Listrequests"); de.put("usage_overview", "NUTZUNG: /auction overview [SEITE]"); de.put("usage_sell","NUTZUNG: /auction sell Item PREISPROBLOCK (MENGE) oder /auction sell PREIS_PRO fuer Gegenstand in der Hand"); de.put("usage_abortrequest","NUTZUNG: /auction abortrequest ID - die ID erhaelst du mittels Listrequest"); de.put("err_num","$1 muss eine Nummer sein"); en.put("noqsell","Kann nicht qsell'en. Es ist kein Standardpreis bekannt"); de.put("rem_success","Deine Auktion wurde abgebrochen. Du kannst deine Gegenstaende im Auktionshaus abholen"); de.put("err_invalid_id", "Diese ID ist ungueltig oder du hast keine Berechtigung dies zu tun"); de.put("rem_rec_suc","Deine Anfrage wurde abgebrochen. Du kannst dein Geld im Auktionshaus abholen"); de.put("err_nosale","Du hast nichts zum Verkauf angeboten"); de.put("header_list","Du hast $1 $2. 
Seite: $3 von $4"); de.put("err_noreq","Du hast keine Gegenstaende im Ankauf"); de.put("err_noperm","Du darfst dies nicht tun"); de.put("err_to_ah","Du musst in ein Auktionshaus gehen"); de.put("default_price", "Preisempfehlung: $1"); de.put("err_toohigh","$1 ist zu hoch"); - de.put("suc_req_part","Dein Geld hat nur fuer $1 Items zu je $2 $a gereicht"); + de.put("suc_req_part","Dein Geld hat nur fuer $1 Items fuer $2 $a gereicht"); de.put("err_nodeliver", "Dein Postfach ist leer"); de.put("err_block_404","Item nicht gefunden"); de.put("err_block","Item ist ungueltig"); de.put("err_full_inv", "Dein Inventar ist voll"); de.put("err_notrade", "Kann nicht verkauft werden"); de.put("err","Etwas ist schiefgelaufen"); de.put("err_noitem","Du besitzt diesen Gegenstand nicht"); de.put("err_nomoney","Du hast nicht genug Geld"); de.put("err_nooffer","Es gibt keine Angebote die deinen Anforderungen entsprechen"); de.put("err_nomoney_fee", "Du kannst dir die Gebuehren von $1 $a nicht leisten"); de.put("suc_offer", "Erfolgreich eingestellt: $1 Items fuer $2 $a"); de.put("suc_fee_paid","Du hast $1 $a an Gebuehren bezahlt"); de.put("suc_bought","Du hast alles Gekauft, was du haben wolltest"); de.put("suc_bought_part","Du hast $1 von $2 gekauft"); de.put("suc_req", "Du hast $1 Gegenstaende zum Ankauf von $2 $a eingetragen"); de.put("suc_rec_item","Du hast $1 Items erhalten"); de.put("suc_rec_money","Du hast $1 $a erhalten"); de.put("suc_sign","Jetzt zerstoere das Schild, welches du nutzen willst"); de.put("suc_sign_com","Erfolgreich. 
Das Schild wurde angelegt"); de.put("info", "Auktionsinformationen ueber $1"); de.put("goto_ah", "Einige Waren koennen im Auktionshaus abgeholt werden"); de.put("collect", "Du kannst einige Waren via /auction collect empfangen"); de.put("amount_sale", "Menge zum Verkauf: $1"); de.put("offer","Angebot: $1 fuer je $2 $a"); de.put("request","Anfrage: $1 fuer je $2 $a"); de.put("welcome","Willkommen bei KrimSale - worldofminecraft.de"); de.put("req_info","Deine Anfrage ist fuer 14 Tage gueltig. Wenn dies nicht erfolgreich ist bekommst du dein Geld wieder"); this.lng.put("en", en); this.lng.put("de", de); if(this.lng.get(configManager.lang) == null) this.lang = "en"; else this.lang = configManager.lang; } public void msg(Player p, String el) { this.msg((Player)p, el, new Object[]{}); } public void msg(CommandSender p, String el) { this.msg((Player)p, el, new Object[]{}); } public void msg(CommandSender p, String el, Object[] args) { this.msg((Player)p, el, args); } public void msg(Player p,String el, Object[] args) { String str = el; if(this.lng.get(this.lang).get(el) != null) str = this.lng.get(this.lang).get(el); str = str.replace("$a", Main.econ.currencyNamePlural()); if(args != null && args.length > 0) { for (int i = 1; i < args.length+1; i++) { String tmp = ""; if(args[i-1] instanceof String) tmp = (String)args[i-1]; else if(args[i-1] instanceof Integer) tmp = new Integer((Integer)args[i-1]).toString(); str = str.replace("$" + i, tmp); } str = str.replace("$$", "$"); } p.sendMessage(ChatColor.GOLD + str); } }
true
true
public KSLang() { HashMap<String,String> en = new HashMap<String,String>(); HashMap<String,String> de = new HashMap<String,String>(); en.put("usage", "USAGE: /auction SELL/BUY/REQUEST/DETAIL/LIST/SIGN/LISTREQUESTS/COLLECT/ABORT/ABORTREQUEST/OVERVIEW/OVERVIEWREQUEST"); en.put("usage_abort","USAGE: /auction abort ID - you can get the id by using list"); en.put("usage_request", "USAGE: /auction request (Item) MAXPRICE AMOUNT"); en.put("usage_buy","USAGE: /auction buy (Item (MAXPRICE)) AMOUNT"); en.put("usage_detail", "USAGE: /auction detail Item"); en.put("usage_sign", "USAGE: /auction sign REQUEST/OFFER ID - you can get the id by using list/listrequest"); en.put("usage_overview", "USAGE: /auction overview PAGE"); en.put("usage_sell","USAGE: /auction sell Item PRICEPERBLOCK (AMOUNT) OR /auction sell PRICE_EACH for Item in Hand"); en.put("usage_abortrequest","USAGE: /auction abortrequest ID - you can get the id by using list"); en.put("err_num","$1 must be numeric"); en.put("rem_success","Your auction has been cancelled. You can pick it up at the auction house"); en.put("err_invalid_id", "This ID was invalid or you dont have the permissions to do that"); en.put("rem_rec_suc","Your request has been cancelled. You can pick it up at the auction house"); en.put("err_nosale","You don't have items for sale"); en.put("noqsell","Unable to quicksell item - no default price"); en.put("header_list","You've $1 $2. 
Page: $3 of $4"); en.put("err_noreq","You don't have items requested"); en.put("err_noperm","You're not allowed to do this"); en.put("err_to_ah","You've to go to an auction house to do this"); en.put("err_toohigh","$1 is too high"); en.put("err_nodeliver", "There is nothing for delivery"); en.put("err_block_404","Item not found"); en.put("err_block","Item is invalid"); en.put("err_full_inv", "Your inventory is full"); en.put("err_notrade", "Cannot be traded"); en.put("err","Something went wrong"); en.put("err_nomoney","You don't have enough money"); en.put("err_nooffer","There is no offer which fulfills your options"); en.put("err_nomoney_fee", "You cannot afford the fee of $1 $a"); en.put("suc_offer", "Success. You're offering $1 Items for $2 $a"); en.put("suc_fee_paid","You've paid an auction-fee of $1 $a"); en.put("suc_bought","You've bought the amount you wanted"); en.put("suc_bought_part","You've bought $1 of $2"); en.put("suc_req", "You've requested $1 items for $2 $a"); en.put("suc_rec_item","You've received $1 items"); en.put("suc_rec_money","You've received $1 $a"); en.put("suc_req_part","You've only enough money for $1 items for $2 $a"); en.put("suc_sign","Now just destroy the sign you want to use"); en.put("info", "Auction details about $1"); en.put("goto_ah", "You can collect some items in the auction house"); en.put("collect", "You can collect some items by entering /auction collect"); en.put("amount_sale", "Amount for sale: $1"); en.put("default_price", "Suggested retail price: $1"); en.put("offer","Offer: $1 for $2 $a each"); en.put("request","Request: $1 for $2 $a each"); en.put("err_noitem","You dont own this item"); en.put("welcome","Welcome to KrimSale - worldofminecraft.de"); en.put("suc_sign_com","Success. The sign has been created"); en.put("req_info","Your request is valid for 14 days. 
If noone offers this item for your price, you'll get your money back"); de.put("usage", "NUTZUNG: /auction SELL/BUY/REQUEST/DETAIL/LIST/SIGN/LISTREQUESTS/COLLECT/ABORT/ABORTREQUEST/OVERVIEW/OVERVIEWREQUEST"); de.put("usage_abort","NUTZUNG: /auction abort ID - die ID erhaelst du mittels List"); de.put("usage_request", "NUTZUNG: /auction request (Item) MAXPREIS MENGE"); de.put("usage_buy","NUTZUNG: /auction buy (Item (MAXPREIS)) MENGE"); de.put("usage_detail", "NUTZUNG: /auction detail Itemname"); de.put("usage_sign", "NUTZUNG: /auction sign REQUEST/OFFER ID - die ID erhaelst du mittels List/Listrequests"); de.put("usage_overview", "NUTZUNG: /auction overview [SEITE]"); de.put("usage_sell","NUTZUNG: /auction sell Item PREISPROBLOCK (MENGE) oder /auction sell PREIS_PRO fuer Gegenstand in der Hand"); de.put("usage_abortrequest","NUTZUNG: /auction abortrequest ID - die ID erhaelst du mittels Listrequest"); de.put("err_num","$1 muss eine Nummer sein"); en.put("noqsell","Kann nicht qsell'en. Es ist kein Standardpreis bekannt"); de.put("rem_success","Deine Auktion wurde abgebrochen. Du kannst deine Gegenstaende im Auktionshaus abholen"); de.put("err_invalid_id", "Diese ID ist ungueltig oder du hast keine Berechtigung dies zu tun"); de.put("rem_rec_suc","Deine Anfrage wurde abgebrochen. Du kannst dein Geld im Auktionshaus abholen"); de.put("err_nosale","Du hast nichts zum Verkauf angeboten"); de.put("header_list","Du hast $1 $2. 
Seite: $3 von $4"); de.put("err_noreq","Du hast keine Gegenstaende im Ankauf"); de.put("err_noperm","Du darfst dies nicht tun"); de.put("err_to_ah","Du musst in ein Auktionshaus gehen"); de.put("default_price", "Preisempfehlung: $1"); de.put("err_toohigh","$1 ist zu hoch"); de.put("suc_req_part","Dein Geld hat nur fuer $1 Items zu je $2 $a gereicht"); de.put("err_nodeliver", "Dein Postfach ist leer"); de.put("err_block_404","Item nicht gefunden"); de.put("err_block","Item ist ungueltig"); de.put("err_full_inv", "Dein Inventar ist voll"); de.put("err_notrade", "Kann nicht verkauft werden"); de.put("err","Etwas ist schiefgelaufen"); de.put("err_noitem","Du besitzt diesen Gegenstand nicht"); de.put("err_nomoney","Du hast nicht genug Geld"); de.put("err_nooffer","Es gibt keine Angebote die deinen Anforderungen entsprechen"); de.put("err_nomoney_fee", "Du kannst dir die Gebuehren von $1 $a nicht leisten"); de.put("suc_offer", "Erfolgreich eingestellt: $1 Items fuer $2 $a"); de.put("suc_fee_paid","Du hast $1 $a an Gebuehren bezahlt"); de.put("suc_bought","Du hast alles Gekauft, was du haben wolltest"); de.put("suc_bought_part","Du hast $1 von $2 gekauft"); de.put("suc_req", "Du hast $1 Gegenstaende zum Ankauf von $2 $a eingetragen"); de.put("suc_rec_item","Du hast $1 Items erhalten"); de.put("suc_rec_money","Du hast $1 $a erhalten"); de.put("suc_sign","Jetzt zerstoere das Schild, welches du nutzen willst"); de.put("suc_sign_com","Erfolgreich. Das Schild wurde angelegt"); de.put("info", "Auktionsinformationen ueber $1"); de.put("goto_ah", "Einige Waren koennen im Auktionshaus abgeholt werden"); de.put("collect", "Du kannst einige Waren via /auction collect empfangen"); de.put("amount_sale", "Menge zum Verkauf: $1"); de.put("offer","Angebot: $1 fuer je $2 $a"); de.put("request","Anfrage: $1 fuer je $2 $a"); de.put("welcome","Willkommen bei KrimSale - worldofminecraft.de"); de.put("req_info","Deine Anfrage ist fuer 14 Tage gueltig. 
Wenn dies nicht erfolgreich ist bekommst du dein Geld wieder"); this.lng.put("en", en); this.lng.put("de", de); if(this.lng.get(configManager.lang) == null) this.lang = "en"; else this.lang = configManager.lang; }
public KSLang() { HashMap<String,String> en = new HashMap<String,String>(); HashMap<String,String> de = new HashMap<String,String>(); en.put("usage", "USAGE: /auction SELL/BUY/REQUEST/DETAIL/LIST/SIGN/LISTREQUESTS/COLLECT/ABORT/ABORTREQUEST/OVERVIEW/OVERVIEWREQUEST"); en.put("usage_abort","USAGE: /auction abort ID - you can get the id by using list"); en.put("usage_request", "USAGE: /auction request (Item) MAXPRICE AMOUNT"); en.put("usage_buy","USAGE: /auction buy (Item (MAXPRICE)) AMOUNT"); en.put("usage_detail", "USAGE: /auction detail Item"); en.put("usage_sign", "USAGE: /auction sign REQUEST/OFFER ID - you can get the id by using list/listrequest"); en.put("usage_overview", "USAGE: /auction overview PAGE"); en.put("usage_sell","USAGE: /auction sell Item PRICEPERBLOCK (AMOUNT) OR /auction sell PRICE_EACH for Item in Hand"); en.put("usage_abortrequest","USAGE: /auction abortrequest ID - you can get the id by using list"); en.put("err_num","$1 must be numeric"); en.put("rem_success","Your auction has been cancelled. You can pick it up at the auction house"); en.put("err_invalid_id", "This ID was invalid or you dont have the permissions to do that"); en.put("rem_rec_suc","Your request has been cancelled. You can pick it up at the auction house"); en.put("err_nosale","You don't have items for sale"); en.put("noqsell","Unable to quicksell item - no default price"); en.put("header_list","You've $1 $2. 
Page: $3 of $4"); en.put("err_noreq","You don't have items requested"); en.put("err_noperm","You're not allowed to do this"); en.put("err_to_ah","You've to go to an auction house to do this"); en.put("err_toohigh","$1 is too high"); en.put("err_nodeliver", "There is nothing for delivery"); en.put("err_block_404","Item not found"); en.put("err_block","Item is invalid"); en.put("err_full_inv", "Your inventory is full"); en.put("err_notrade", "Cannot be traded"); en.put("err","Something went wrong"); en.put("err_nomoney","You don't have enough money"); en.put("err_nooffer","There is no offer which fulfills your options"); en.put("err_nomoney_fee", "You cannot afford the fee of $1 $a"); en.put("suc_offer", "Success. You're offering $1 Items for $2 $a"); en.put("suc_fee_paid","You've paid an auction-fee of $1 $a"); en.put("suc_bought","You've bought the amount you wanted"); en.put("suc_bought_part","You've bought $1 of $2"); en.put("suc_req", "You've requested $1 items for $2 $a"); en.put("suc_rec_item","You've received $1 items"); en.put("suc_rec_money","You've received $1 $a"); en.put("suc_req_part","You've only enough money for $1 items for $2 $a"); en.put("suc_sign","Now just destroy the sign you want to use"); en.put("info", "Auction details about $1"); en.put("goto_ah", "You can collect some items in the auction house"); en.put("collect", "You can collect some items by entering /auction collect"); en.put("amount_sale", "Amount for sale: $1"); en.put("default_price", "Suggested retail price: $1"); en.put("offer","Offer: $1 for $2 $a each"); en.put("request","Request: $1 for $2 $a each"); en.put("err_noitem","You dont own this item"); en.put("welcome","Welcome to KrimSale - worldofminecraft.de"); en.put("suc_sign_com","Success. The sign has been created"); en.put("req_info","Your request is valid for 14 days. 
If noone offers this item for your price, you'll get your money back"); de.put("usage", "NUTZUNG: /auction SELL/BUY/REQUEST/DETAIL/LIST/SIGN/LISTREQUESTS/COLLECT/ABORT/ABORTREQUEST/OVERVIEW/OVERVIEWREQUEST"); de.put("usage_abort","NUTZUNG: /auction abort ID - die ID erhaelst du mittels List"); de.put("usage_request", "NUTZUNG: /auction request (Item) MAXPREIS MENGE"); de.put("usage_buy","NUTZUNG: /auction buy (Item (MAXPREIS)) MENGE"); de.put("usage_detail", "NUTZUNG: /auction detail Itemname"); de.put("usage_sign", "NUTZUNG: /auction sign REQUEST/OFFER ID - die ID erhaelst du mittels List/Listrequests"); de.put("usage_overview", "NUTZUNG: /auction overview [SEITE]"); de.put("usage_sell","NUTZUNG: /auction sell Item PREISPROBLOCK (MENGE) oder /auction sell PREIS_PRO fuer Gegenstand in der Hand"); de.put("usage_abortrequest","NUTZUNG: /auction abortrequest ID - die ID erhaelst du mittels Listrequest"); de.put("err_num","$1 muss eine Nummer sein"); en.put("noqsell","Kann nicht qsell'en. Es ist kein Standardpreis bekannt"); de.put("rem_success","Deine Auktion wurde abgebrochen. Du kannst deine Gegenstaende im Auktionshaus abholen"); de.put("err_invalid_id", "Diese ID ist ungueltig oder du hast keine Berechtigung dies zu tun"); de.put("rem_rec_suc","Deine Anfrage wurde abgebrochen. Du kannst dein Geld im Auktionshaus abholen"); de.put("err_nosale","Du hast nichts zum Verkauf angeboten"); de.put("header_list","Du hast $1 $2. 
Seite: $3 von $4"); de.put("err_noreq","Du hast keine Gegenstaende im Ankauf"); de.put("err_noperm","Du darfst dies nicht tun"); de.put("err_to_ah","Du musst in ein Auktionshaus gehen"); de.put("default_price", "Preisempfehlung: $1"); de.put("err_toohigh","$1 ist zu hoch"); de.put("suc_req_part","Dein Geld hat nur fuer $1 Items fuer $2 $a gereicht"); de.put("err_nodeliver", "Dein Postfach ist leer"); de.put("err_block_404","Item nicht gefunden"); de.put("err_block","Item ist ungueltig"); de.put("err_full_inv", "Dein Inventar ist voll"); de.put("err_notrade", "Kann nicht verkauft werden"); de.put("err","Etwas ist schiefgelaufen"); de.put("err_noitem","Du besitzt diesen Gegenstand nicht"); de.put("err_nomoney","Du hast nicht genug Geld"); de.put("err_nooffer","Es gibt keine Angebote die deinen Anforderungen entsprechen"); de.put("err_nomoney_fee", "Du kannst dir die Gebuehren von $1 $a nicht leisten"); de.put("suc_offer", "Erfolgreich eingestellt: $1 Items fuer $2 $a"); de.put("suc_fee_paid","Du hast $1 $a an Gebuehren bezahlt"); de.put("suc_bought","Du hast alles Gekauft, was du haben wolltest"); de.put("suc_bought_part","Du hast $1 von $2 gekauft"); de.put("suc_req", "Du hast $1 Gegenstaende zum Ankauf von $2 $a eingetragen"); de.put("suc_rec_item","Du hast $1 Items erhalten"); de.put("suc_rec_money","Du hast $1 $a erhalten"); de.put("suc_sign","Jetzt zerstoere das Schild, welches du nutzen willst"); de.put("suc_sign_com","Erfolgreich. Das Schild wurde angelegt"); de.put("info", "Auktionsinformationen ueber $1"); de.put("goto_ah", "Einige Waren koennen im Auktionshaus abgeholt werden"); de.put("collect", "Du kannst einige Waren via /auction collect empfangen"); de.put("amount_sale", "Menge zum Verkauf: $1"); de.put("offer","Angebot: $1 fuer je $2 $a"); de.put("request","Anfrage: $1 fuer je $2 $a"); de.put("welcome","Willkommen bei KrimSale - worldofminecraft.de"); de.put("req_info","Deine Anfrage ist fuer 14 Tage gueltig. 
Wenn dies nicht erfolgreich ist bekommst du dein Geld wieder"); this.lng.put("en", en); this.lng.put("de", de); if(this.lng.get(configManager.lang) == null) this.lang = "en"; else this.lang = configManager.lang; }
diff --git a/orbisgis-view/src/main/java/org/orbisgis/view/geocatalog/Catalog.java b/orbisgis-view/src/main/java/org/orbisgis/view/geocatalog/Catalog.java index 9c3d08e84..9fc0871de 100644 --- a/orbisgis-view/src/main/java/org/orbisgis/view/geocatalog/Catalog.java +++ b/orbisgis-view/src/main/java/org/orbisgis/view/geocatalog/Catalog.java @@ -1,687 +1,687 @@ /** * OrbisGIS is a GIS application dedicated to scientific spatial simulation. * This cross-platform GIS is developed at French IRSTV institute and is able to * manipulate and create vector and raster spatial information. * * OrbisGIS is distributed under GPL 3 license. It is produced by the "Atelier * SIG" team of the IRSTV Institute <http://www.irstv.fr/> CNRS FR 2488. * * Copyright (C) 2007-2012 IRSTV (FR CNRS 2488) * * This file is part of OrbisGIS. * * OrbisGIS is free software: you can redistribute it and/or modify it under the * terms of the GNU General Public License as published by the Free Software * Foundation, either version 3 of the License, or (at your option) any later * version. * * OrbisGIS is distributed in the hope that it will be useful, but WITHOUT ANY * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR * A PARTICULAR PURPOSE. See the GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along with * OrbisGIS. If not, see <http://www.gnu.org/licenses/>. 
* * For more information, please consult: <http://www.orbisgis.org/> or contact * directly: info_at_ orbisgis.org */ package org.orbisgis.view.geocatalog; import java.awt.BorderLayout; import java.awt.event.ActionListener; import java.awt.event.MouseEvent; import java.awt.event.MouseListener; import java.beans.EventHandler; import java.io.File; import java.net.URI; import java.util.List; import javax.swing.*; import javax.swing.filechooser.FileFilter; import org.apache.commons.io.FilenameUtils; import org.apache.log4j.Logger; import org.gdms.data.DataSourceFactory; import org.gdms.data.SourceAlreadyExistsException; import org.gdms.data.stream.StreamSource; import org.gdms.data.stream.StreamSourceDefinition; import org.gdms.driver.Driver; import org.gdms.driver.FileDriver; import org.gdms.driver.driverManager.DriverFilter; import org.gdms.driver.driverManager.DriverManager; import org.gdms.source.*; import org.gvsig.remoteClient.wms.WMSClient; import org.gvsig.remoteClient.wms.WMSLayer; import org.orbisgis.core.DataManager; import org.orbisgis.core.Services; import org.orbisgis.core.events.EventException; import org.orbisgis.core.events.ListenerContainer; import org.orbisgis.sif.UIFactory; import org.orbisgis.sif.UIPanel; import org.orbisgis.sif.components.SaveFilePanel; import org.orbisgis.utils.CollectionUtils; import org.orbisgis.view.background.BackgroundJob; import org.orbisgis.view.background.BackgroundManager; import org.orbisgis.view.components.filter.DefaultActiveFilter; import org.orbisgis.view.components.filter.FilterFactoryManager; import org.orbisgis.view.docking.DockingPanel; import org.orbisgis.view.docking.DockingPanelParameters; import org.orbisgis.view.edition.EditorManager; import org.orbisgis.view.geocatalog.dialogs.OpenGdmsFilePanel; import org.orbisgis.view.geocatalog.dialogs.OpenGdmsFolderPanel; import org.orbisgis.view.geocatalog.filters.IFilter; import org.orbisgis.view.geocatalog.filters.factories.NameContains; import 
org.orbisgis.view.geocatalog.filters.factories.NameNotContains; import org.orbisgis.view.geocatalog.filters.factories.SourceTypeIs; import org.orbisgis.view.geocatalog.io.ExportInFileOperation; import org.orbisgis.view.geocatalog.renderer.DataSourceListCellRenderer; import org.orbisgis.view.geocatalog.sourceWizards.db.TableExportPanel; import org.orbisgis.view.geocatalog.sourceWizards.db.TableImportPanel; import org.orbisgis.view.geocatalog.sourceWizards.wms.LayerConfigurationPanel; import org.orbisgis.view.geocatalog.sourceWizards.wms.SRSPanel; import org.orbisgis.view.geocatalog.sourceWizards.wms.WMSConnectionPanel; import org.orbisgis.view.icons.OrbisGISIcon; import org.orbisgis.view.table.TableEditableElement; import org.xnap.commons.i18n.I18n; import org.xnap.commons.i18n.I18nFactory; /** * This is the GeoCatalog panel. That Panel show the list of available * DataSource * * This is connected with the SourceManager model. @note If you want to add new * functionality to data source items without change this class you can use the * eventSourceListPopupMenuCreating listener container to add more items in the * source list pop-up menu. 
*/ public class Catalog extends JPanel implements DockingPanel { //The UID must be incremented when the serialization is not compatible with the new version of this class private static final long serialVersionUID = 1L; private static final I18n I18N = I18nFactory.getI18n(Catalog.class); private static final Logger LOGGER = Logger.getLogger(Catalog.class); private DockingPanelParameters dockingParameters = new DockingPanelParameters(); /* * !< GeoCatalog docked panel properties */ private JList sourceList; private SourceListModel sourceListContent; //The factory shown when the user click on new factory button private static final String DEFAULT_FILTER_FACTORY = "name_contains"; //The popup menu event listener manager private ListenerContainer<MenuPopupEventData> eventSourceListPopupMenuCreating = new ListenerContainer<MenuPopupEventData>(); private FilterFactoryManager<IFilter,DefaultActiveFilter> filterFactoryManager; /** * For the Unit test purpose * * @return The source list instance */ public JList getSourceList() { return sourceList; } /** * The popup menu event listener manager The popup menu is being * created, all listeners are able to feed the menu with custom * functions * * @return */ public ListenerContainer<MenuPopupEventData> getEventSourceListPopupMenuCreating() { return eventSourceListPopupMenuCreating; } /** * Default constructor */ public Catalog() { super(new BorderLayout()); dockingParameters.setName("geocatalog"); dockingParameters.setTitle(I18N.tr("GeoCatalog")); dockingParameters.setTitleIcon(OrbisGISIcon.getIcon("geocatalog")); dockingParameters.setCloseable(true); //Add the Source List in a Scroll Pane, //then add the scroll pane in this panel add(new JScrollPane(makeSourceList()), BorderLayout.CENTER); //Init the filter factory manager filterFactoryManager = new FilterFactoryManager<IFilter,DefaultActiveFilter>(); //Set the factory that must be shown when the user click on add filter button 
filterFactoryManager.setDefaultFilterFactory(DEFAULT_FILTER_FACTORY); //Set listener on filter change event, this event will update the filters FilterFactoryManager.FilterChangeListener refreshFilterListener = EventHandler.create(FilterFactoryManager.FilterChangeListener.class, sourceListContent, //target of event "setFilters", //target method "source.getFilters" //target method argument ); filterFactoryManager.getEventFilterChange().addListener(sourceListContent,refreshFilterListener); filterFactoryManager.getEventFilterFactoryChange().addListener(sourceListContent,refreshFilterListener); //Add the filter list at the top of the geocatalog add(filterFactoryManager.makeFilterPanel(false), BorderLayout.NORTH); //Create a toolbar to add a new filter JToolBar toolBar = new JToolBar(); JButton button = new JButton(I18N.tr("Add filter"), OrbisGISIcon.getIcon("add_filter")); button.setToolTipText(I18N.tr("Add a new data source filter")); button.addActionListener(EventHandler.create(ActionListener.class, filterFactoryManager, "onAddFilter")); toolBar.add(button); toolBar.addSeparator(); dockingParameters.setToolBar(toolBar); //Add the geocatalog specific filters registerFilterFactories(); } /** * Use service to return the data manager * * @return DataManager instance */ private DataManager getDataManager() { return Services.getService(DataManager.class); } /** * DataSource URI drop. Currently used on file drop by the {@link SourceListTransferHandler}. 
* * @param uriDrop Uniform Resource Identifier */ public void onDropURI(List<URI> uriDrop) { SourceManager src = getDataManager().getSourceManager(); for (URI uri : uriDrop) { // Use the file name as the data source name if (uri.getScheme().equals("file")) { File file = new File(uri); src.register(src.getUniqueName(FilenameUtils.removeExtension(file.getName())), uri); } else { src.nameAndRegister(uri); } } } /** * For JUnit purpose, return the filter factory manager * * @return Instance of filterFactoryManager */ public FilterFactoryManager<IFilter,DefaultActiveFilter> getFilterFactoryManager() { return filterFactoryManager; } /** * Add the built-ins filter factory */ private void registerFilterFactories() { filterFactoryManager.registerFilterFactory(new NameContains()); filterFactoryManager.registerFilterFactory(new SourceTypeIs()); filterFactoryManager.registerFilterFactory(new NameNotContains()); } /** * The user click on the source list control * * @param e The mouse event fired by the LI */ public void onMouseActionOnSourceList(MouseEvent e) { //Manage selection of items before popping up the menu if (e.isPopupTrigger()) { //Right mouse button under linux and windows int itemUnderMouse = -1; //Item under the position of the mouse event //Find the Item under the position of the mouse cursor for (int i = 0; i < sourceListContent.getSize(); i++) { //If the coordinate of the cursor cover the cell bouding box if (sourceList.getCellBounds(i, i).contains(e.getPoint())) { itemUnderMouse = i; break; } } //Retrieve all selected items index int[] selectedItems = sourceList.getSelectedIndices(); //If there are a list item under the mouse if ((selectedItems != null) && (itemUnderMouse != -1)) { //If the item under the mouse was not previously selected if (!CollectionUtils.contains(selectedItems, itemUnderMouse)) { //Control must be pushed to add the list item to the selection if (e.isControlDown()) { sourceList.addSelectionInterval(itemUnderMouse, itemUnderMouse); } else { 
//Unselect the other items and select only the item under the mouse sourceList.setSelectionInterval(itemUnderMouse, itemUnderMouse); } } } else if (itemUnderMouse == -1) { //Unselect all items sourceList.clearSelection(); } //Selection are ready, now create the popup menu JPopupMenu popup = makePopupMenu(); if (popup != null) { popup.show(e.getComponent(), e.getX(), e.getY()); } } } /** * The user select one or more data source and request to open * the table editor */ public void onMenuShowTable() { String[] res = getSelectedSources(); EditorManager editorManager = Services.getService(EditorManager.class); for (String source : res) { TableEditableElement tableDocument = new TableEditableElement(source); editorManager.openEditable(tableDocument); } } /** * The user click on the menu item called "Add/File" The user wants to * open a file using the geocatalog. It will open a panel dedicated to * the selection of the wanted files. This panel will then return the * selected files. */ public void onMenuAddFile() { SourceManager sourceManager = getDataManager().getSourceManager(); //Create the SIF panel OpenGdmsFilePanel openDialog = new OpenGdmsFilePanel(I18N.tr("Select the file to add"), sourceManager.getDriverManager()); //Ask SIF to open the dialog if (UIFactory.showDialog(openDialog, true, true)) { // We can retrieve the files that have been selected by the user File[] files = openDialog.getSelectedFiles(); for (int i = 0; i < files.length; i++) { File file = files[i]; //If there is a driver compatible with //this file extensions if (sourceManager.getDriverManager().isFileSupported(file)) { //Try to add the data source try { String name = sourceManager.getUniqueName(FilenameUtils.removeExtension(file.getName())); sourceManager.register(name, file); } catch (SourceAlreadyExistsException e) { LOGGER.error(I18N.tr("This source was already registered"), e); } } } } } /** * Connect to a database and add one or more tables in the geocatalog. 
*/ public void onMenuAddFromDataBase() { SourceManager sm = getDataManager().getSourceManager(); TableImportPanel tableImportPanel = new TableImportPanel(sm); tableImportPanel.setVisible(true); } /** * The user click on the menu item called "clear geocatalog" */ public void onMenuClearGeoCatalog() { //User must validate this action int option = JOptionPane.showConfirmDialog(this, I18N.tr("All data source of the GeoCatalog will be removed. Are you sure ?"), I18N.tr("Clear the GeoCatalog"), JOptionPane.YES_NO_OPTION, JOptionPane.WARNING_MESSAGE); if (option == JOptionPane.YES_OPTION) { sourceListContent.clearAllSourceExceptSystemTables(); } } /** * The user can remove added source from the geocatalog */ public void onMenuRemoveSource() { SourceManager sm = getDataManager().getSourceManager(); String[] res = getSelectedSources(); for (String resource : res) { try { sm.remove(resource); } catch (IllegalStateException e) { LOGGER.error(I18N.tr("Cannot remove the source {0}", resource), e); } } } /** * The user can export a source in a file. 
*/ public void onMenuSaveInfile() { String[] res = getSelectedSources(); DataManager dm = Services.getService(DataManager.class); SourceManager sm = dm.getSourceManager(); DataSourceFactory dsf = dm.getDataSourceFactory(); DriverManager driverManager = sm.getDriverManager(); for (String source : res) { final SaveFilePanel outfilePanel = new SaveFilePanel( "org.orbisgis.core.ui.plugins.views.geocatalog.SaveInFile", I18N.tr("Save the source : " + source)); int type = sm.getSource(source).getType(); DriverFilter filter; if ((type & SourceManager.VECTORIAL) == SourceManager.VECTORIAL) { // no other choice but to add CSV here // because of CSVStringDriver implementation filter = new OrDriverFilter(new VectorialDriverFilter(), new CSVFileDriverFilter()); } else if ((type & SourceManager.RASTER) == SourceManager.RASTER) { filter = new RasterDriverFilter(); } else if ((type & SourceManager.STREAM) == SourceManager.STREAM) { filter = new DriverFilter() { @Override public boolean acceptDriver(Driver driver) { return false; } }; } else { filter = new NotDriverFilter(new RasterDriverFilter()); } Driver[] filtered = driverManager.getDrivers(new AndDriverFilter( filter, new WritableDriverFilter(), new FileDriverFilter())); for (int i = 0; i < filtered.length; i++) { FileDriver fileDriver = (FileDriver) filtered[i]; String[] extensions = fileDriver.getFileExtensions(); outfilePanel.addFilter(extensions, fileDriver.getTypeDescription()); } if (UIFactory.showDialog(outfilePanel, true, true)) { final File savedFile = outfilePanel.getSelectedFile().getAbsoluteFile(); BackgroundManager bm = Services.getService(BackgroundManager.class); bm.backgroundOperation(new ExportInFileOperation(dsf, source, savedFile, this)); } } } /** * The user can save a source in a database */ public void onMenuSaveInDB() { DataManager dm = Services.getService(DataManager.class); SourceManager sm = dm.getSourceManager(); String[] res = getSelectedSources(); TableExportPanel tableExportPanel = new 
TableExportPanel(res, sm); tableExportPanel.setVisible(true); } /** * The user can load several files from a folder */ public void onMenuAddFilesFromFolder() { final OpenGdmsFolderPanel folderPanel = new OpenGdmsFolderPanel(I18N.tr("Add files from a folder")); if (UIFactory.showDialog(folderPanel, true, true)) { File[] files = folderPanel.getSelectedFiles(); for (final File file : files) { // for each folder, we apply the method processFolder. // We use the filter selected by the user in the panel // to succeed in this operation. BackgroundManager bm = Services.getService(BackgroundManager.class); bm.backgroundOperation(new BackgroundJob() { @Override public String getTaskName() { return I18N.tr("Add from folder"); } @Override public void run(org.orbisgis.progress.ProgressMonitor pm) { processFolder(file, folderPanel.getSelectedFilter(), pm); } }); } } } /** * The user can load several WMS layers from the same server. */ public void onMenuAddWMSServer() { DataManager dm = Services.getService(DataManager.class); SourceManager sm = dm.getSourceManager(); SRSPanel srsPanel = new SRSPanel(); LayerConfigurationPanel layerConfiguration = new LayerConfigurationPanel(srsPanel); WMSConnectionPanel wmsConnection = new WMSConnectionPanel(layerConfiguration); if (UIFactory.showDialog(new UIPanel[]{wmsConnection, layerConfiguration, srsPanel})) { WMSClient client = wmsConnection.getWMSClient(); String validImageFormat = wmsConnection.getFirstImageFormat(client.getFormats()); if (validImageFormat == null) { LOGGER.error(I18N.tr("Cannot find a valid image format for this WMS server")); } else { Object[] layers = layerConfiguration.getSelectedLayers(); for (Object layer : layers) { String layerName = ((WMSLayer) layer).getName(); String uniqueLayerName = layerName; if (sm.exists(layerName)) { uniqueLayerName = sm.getUniqueName(layerName); } StreamSource wmsSource = new StreamSource(client.getHost(), client.getPort(), layerName, "wms", validImageFormat, srsPanel.getSRS()); 
StreamSourceDefinition streamSourceDefinition = new StreamSourceDefinition(wmsSource); sm.register(uniqueLayerName, streamSourceDefinition); } } } } /** * the method that actually process the content of a directory, or a * file. If the file is acceptable by the FileFilter, it is processed * * @param file * @param pm */ private void processFolder(File file, FileFilter filter, org.orbisgis.progress.ProgressMonitor pm) { if (file.isDirectory()) { pm.startTask(file.getName(), 100); for (File content : file.listFiles()) { if (pm.isCancelled()) { break; } processFolder(content, filter, pm); } pm.endTask(); } else { DataManager dm = Services.getService(DataManager.class); DriverManager dr = dm.getSourceManager().getDriverManager(); if (filter.accept(file) && dr.isFileSupported(file)) { SourceManager sourceManager = dm.getSourceManager(); try { String name = sourceManager.getUniqueName(FilenameUtils.removeExtension(file.getName())); sourceManager.register(name, file); } catch (SourceAlreadyExistsException e) { LOGGER.error(I18N.tr("The source is already registered : "), e); } } } } /** * Create a popup menu corresponding to the current state of source * selection * * @return A new popup menu */ private JPopupMenu makePopupMenu() { JPopupMenu rootMenu = new JPopupMenu(); //Popup:Add JMenu addMenu = new JMenu(I18N.tr("Add")); rootMenu.add(addMenu); //Popup:Add:File JMenuItem addFileItem = new JMenuItem( I18N.tr("File"), OrbisGISIcon.getIcon("page_white_add")); addFileItem.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuAddFile")); addMenu.add(addFileItem); //Add the database panel addFileItem = new JMenuItem( I18N.tr("DataBase"), OrbisGISIcon.getIcon("database_add")); addFileItem.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuAddFromDataBase")); addMenu.add(addFileItem); //Add the server panel addFileItem = new JMenuItem( - I18N.tr("Server"), + I18N.tr("WMS server"), OrbisGISIcon.getIcon("server_connect")); 
addFileItem.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuAddWMSServer")); addMenu.add(addFileItem); //Add files from folder addFileItem = new JMenuItem( I18N.tr("Folder"), OrbisGISIcon.getIcon("folder_add")); addFileItem.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuAddFilesFromFolder")); addMenu.add(addFileItem); if (!sourceList.isSelectionEmpty()) { //Popup:Save JMenu saveMenu = new JMenu(I18N.tr("Save")); rootMenu.add(saveMenu); //Popup:Save:File JMenuItem saveInFileItem = new JMenuItem( I18N.tr("File"), OrbisGISIcon.getIcon("page_white_save")); saveInFileItem.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuSaveInfile")); saveMenu.add(saveInFileItem); //Popup:Save:File JMenuItem saveInDBItem = new JMenuItem( I18N.tr("Database"), OrbisGISIcon.getIcon("database_save")); saveInDBItem.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuSaveInDB")); saveMenu.add(saveInDBItem); //Popup:Open attributes JMenuItem openTableMenu = new JMenuItem(I18N.tr("Open the attributes"), OrbisGISIcon.getIcon("openattributes")); openTableMenu.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuShowTable")); rootMenu.add(openTableMenu); } rootMenu.addSeparator(); //Popup:ClearGeocatalog (added if the datasource manager is not empty) DataManager dm = Services.getService(DataManager.class); SourceManager dr = dm.getSourceManager(); if (!dr.isEmpty(true)) { JMenuItem clearCatalogItem = new JMenuItem(I18N.tr("Clear the GeoCatalog"), OrbisGISIcon.getIcon("bin_closed")); clearCatalogItem.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuClearGeoCatalog")); rootMenu.add(clearCatalogItem); } //Add function to remove a source if (!sourceList.isSelectionEmpty()) { JMenuItem removeSourceItem = new JMenuItem( I18N.tr("Remove the source"), OrbisGISIcon.getIcon("remove")); removeSourceItem.addActionListener(EventHandler.create(ActionListener.class, this, 
"onMenuRemoveSource")); rootMenu.add(removeSourceItem); } ////////////////////////////// //Plugins //Add additionnal extern data source functions try { eventSourceListPopupMenuCreating.callListeners(new MenuPopupEventData(rootMenu, this)); } catch (EventException ex) { //A listener cancel the creation of the popup menu LOGGER.warn(I18N.tr("An external code stop the creation of the PopUp menu"), ex); return null; } return rootMenu; } /** * Create the Source List ui component */ private JList makeSourceList() { sourceList = new JList(); //Set the list content renderer sourceList.setCellRenderer(new DataSourceListCellRenderer(sourceList)); //Add mouse listener for popup menu sourceList.addMouseListener(EventHandler.create(MouseListener.class, this, "onMouseActionOnSourceList", "")); //This method ask the event data as argument //Create the list content manager sourceListContent = new SourceListModel(); //Replace the default model by the GeoCatalog model sourceList.setModel(sourceListContent); SourceListTransferHandler transferHandler = new SourceListTransferHandler(); //Call the method this.onDropURI when the user drop uri(like files) on the list control transferHandler.getDropListenerHandler().addListener(this, EventHandler.create(SourceListTransferHandler.DropUriListener.class, this, "onDropURI", "uriList")); sourceList.setTransferHandler(transferHandler); sourceList.setDragEnabled(true); //Attach the content to the DataSource instance sourceListContent.setListeners(); return sourceList; } /** * Free listeners, Catalog must not be reachable to let the Garbage * Collector free this instance */ public void dispose() { //Remove listeners linked with the source list content filterFactoryManager.getEventFilterChange().clearListeners(); filterFactoryManager.getEventFilterFactoryChange().clearListeners(); sourceListContent.dispose(); } /** * Return the names of the selected sources in the geocatalog. 
* * @return */ public String[] getSelectedSources() { Object[] selectedValues = getSourceList().getSelectedValues(); String[] sources = new String[selectedValues.length]; for (int i = 0; i < sources.length; i++) { sources[i] = selectedValues[i].toString(); } return sources; } /** * Give information on the behaviour of this panel related to the * current docking system * * @return The panel parameter instance */ @Override public DockingPanelParameters getDockingParameters() { return dockingParameters; } @Override public JComponent getComponent() { return this; } }
true
true
private JPopupMenu makePopupMenu() { JPopupMenu rootMenu = new JPopupMenu(); //Popup:Add JMenu addMenu = new JMenu(I18N.tr("Add")); rootMenu.add(addMenu); //Popup:Add:File JMenuItem addFileItem = new JMenuItem( I18N.tr("File"), OrbisGISIcon.getIcon("page_white_add")); addFileItem.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuAddFile")); addMenu.add(addFileItem); //Add the database panel addFileItem = new JMenuItem( I18N.tr("DataBase"), OrbisGISIcon.getIcon("database_add")); addFileItem.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuAddFromDataBase")); addMenu.add(addFileItem); //Add the server panel addFileItem = new JMenuItem( I18N.tr("Server"), OrbisGISIcon.getIcon("server_connect")); addFileItem.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuAddWMSServer")); addMenu.add(addFileItem); //Add files from folder addFileItem = new JMenuItem( I18N.tr("Folder"), OrbisGISIcon.getIcon("folder_add")); addFileItem.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuAddFilesFromFolder")); addMenu.add(addFileItem); if (!sourceList.isSelectionEmpty()) { //Popup:Save JMenu saveMenu = new JMenu(I18N.tr("Save")); rootMenu.add(saveMenu); //Popup:Save:File JMenuItem saveInFileItem = new JMenuItem( I18N.tr("File"), OrbisGISIcon.getIcon("page_white_save")); saveInFileItem.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuSaveInfile")); saveMenu.add(saveInFileItem); //Popup:Save:File JMenuItem saveInDBItem = new JMenuItem( I18N.tr("Database"), OrbisGISIcon.getIcon("database_save")); saveInDBItem.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuSaveInDB")); saveMenu.add(saveInDBItem); //Popup:Open attributes JMenuItem openTableMenu = new JMenuItem(I18N.tr("Open the attributes"), OrbisGISIcon.getIcon("openattributes")); openTableMenu.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuShowTable")); 
rootMenu.add(openTableMenu); } rootMenu.addSeparator(); //Popup:ClearGeocatalog (added if the datasource manager is not empty) DataManager dm = Services.getService(DataManager.class); SourceManager dr = dm.getSourceManager(); if (!dr.isEmpty(true)) { JMenuItem clearCatalogItem = new JMenuItem(I18N.tr("Clear the GeoCatalog"), OrbisGISIcon.getIcon("bin_closed")); clearCatalogItem.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuClearGeoCatalog")); rootMenu.add(clearCatalogItem); } //Add function to remove a source if (!sourceList.isSelectionEmpty()) { JMenuItem removeSourceItem = new JMenuItem( I18N.tr("Remove the source"), OrbisGISIcon.getIcon("remove")); removeSourceItem.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuRemoveSource")); rootMenu.add(removeSourceItem); } ////////////////////////////// //Plugins //Add additionnal extern data source functions try { eventSourceListPopupMenuCreating.callListeners(new MenuPopupEventData(rootMenu, this)); } catch (EventException ex) { //A listener cancel the creation of the popup menu LOGGER.warn(I18N.tr("An external code stop the creation of the PopUp menu"), ex); return null; } return rootMenu; }
private JPopupMenu makePopupMenu() { JPopupMenu rootMenu = new JPopupMenu(); //Popup:Add JMenu addMenu = new JMenu(I18N.tr("Add")); rootMenu.add(addMenu); //Popup:Add:File JMenuItem addFileItem = new JMenuItem( I18N.tr("File"), OrbisGISIcon.getIcon("page_white_add")); addFileItem.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuAddFile")); addMenu.add(addFileItem); //Add the database panel addFileItem = new JMenuItem( I18N.tr("DataBase"), OrbisGISIcon.getIcon("database_add")); addFileItem.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuAddFromDataBase")); addMenu.add(addFileItem); //Add the server panel addFileItem = new JMenuItem( I18N.tr("WMS server"), OrbisGISIcon.getIcon("server_connect")); addFileItem.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuAddWMSServer")); addMenu.add(addFileItem); //Add files from folder addFileItem = new JMenuItem( I18N.tr("Folder"), OrbisGISIcon.getIcon("folder_add")); addFileItem.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuAddFilesFromFolder")); addMenu.add(addFileItem); if (!sourceList.isSelectionEmpty()) { //Popup:Save JMenu saveMenu = new JMenu(I18N.tr("Save")); rootMenu.add(saveMenu); //Popup:Save:File JMenuItem saveInFileItem = new JMenuItem( I18N.tr("File"), OrbisGISIcon.getIcon("page_white_save")); saveInFileItem.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuSaveInfile")); saveMenu.add(saveInFileItem); //Popup:Save:File JMenuItem saveInDBItem = new JMenuItem( I18N.tr("Database"), OrbisGISIcon.getIcon("database_save")); saveInDBItem.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuSaveInDB")); saveMenu.add(saveInDBItem); //Popup:Open attributes JMenuItem openTableMenu = new JMenuItem(I18N.tr("Open the attributes"), OrbisGISIcon.getIcon("openattributes")); openTableMenu.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuShowTable")); 
rootMenu.add(openTableMenu); } rootMenu.addSeparator(); //Popup:ClearGeocatalog (added if the datasource manager is not empty) DataManager dm = Services.getService(DataManager.class); SourceManager dr = dm.getSourceManager(); if (!dr.isEmpty(true)) { JMenuItem clearCatalogItem = new JMenuItem(I18N.tr("Clear the GeoCatalog"), OrbisGISIcon.getIcon("bin_closed")); clearCatalogItem.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuClearGeoCatalog")); rootMenu.add(clearCatalogItem); } //Add function to remove a source if (!sourceList.isSelectionEmpty()) { JMenuItem removeSourceItem = new JMenuItem( I18N.tr("Remove the source"), OrbisGISIcon.getIcon("remove")); removeSourceItem.addActionListener(EventHandler.create(ActionListener.class, this, "onMenuRemoveSource")); rootMenu.add(removeSourceItem); } ////////////////////////////// //Plugins //Add additionnal extern data source functions try { eventSourceListPopupMenuCreating.callListeners(new MenuPopupEventData(rootMenu, this)); } catch (EventException ex) { //A listener cancel the creation of the popup menu LOGGER.warn(I18N.tr("An external code stop the creation of the PopUp menu"), ex); return null; } return rootMenu; }
diff --git a/src/org/community/intellij/plugins/communitycase/Branch.java b/src/org/community/intellij/plugins/communitycase/Branch.java index 6c4e781..977468c 100644 --- a/src/org/community/intellij/plugins/communitycase/Branch.java +++ b/src/org/community/intellij/plugins/communitycase/Branch.java @@ -1,279 +1,279 @@ /* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.community.intellij.plugins.communitycase; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.vcs.VcsException; import com.intellij.openapi.vfs.VirtualFile; import org.community.intellij.plugins.communitycase.commands.Command; import org.community.intellij.plugins.communitycase.commands.SimpleHandler; import org.community.intellij.plugins.communitycase.config.ConfigUtil; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.regex.Pattern; /** * This data class represents a branch */ public class Branch extends Reference { @NonNls public static final String NO_BRANCH_NAME = "(no branch)"; // The name that specifies that is on specific commit rather then on some branch ({@value}) @NonNls public static final String REFS_HEADS_PREFIX = "/main/"; // Prefix for local branches ({@value}) @NonNls public static 
final String REFS_REMOTES_PREFIX = "refs/remotes/"; // Prefix for remote branches ({@value}) private final boolean myRemote; private final boolean myActive; public Branch(@NotNull String name, boolean active, boolean remote) { super(name); myRemote = remote; myActive = active; } /** * @return true if the branch is remote */ public boolean isRemote() { return myRemote; } /** * @return true if the branch is active */ public boolean isActive() { return myActive; } @NotNull public String getFullName() { return (myRemote ? REFS_REMOTES_PREFIX : REFS_HEADS_PREFIX) + myName; } /** * Get tracked remote for the branch * * @param project the context project * @param root the VCS root to investigate * @return the remote name for tracked branch, "." meaning the current repository, or null if no branch is tracked * @throws VcsException if there is a problem with running version control */ @Nullable public String getTrackedRemoteName(Project project, VirtualFile root) throws VcsException { return ConfigUtil.getValue(project, root, trackedRemoteKey()); } /** * Get tracked the branch * * @param project the context project * @param root the VCS root to investigate * @return the name of tracked branch * @throws com.intellij.openapi.vcs.VcsException if there is a problem with running version control */ @Nullable public String getTrackedBranchName(Project project, VirtualFile root) throws VcsException { return ConfigUtil.getValue(project, root, trackedBranchKey()); } /** * Get current branch from . * * @param project a project * @param root vcs root * @return the current branch or null if there is no current branch or if specific commit has been checked out. * @throws com.intellij.openapi.vcs.VcsException if there is a problem running */ @Nullable public static Branch current(Project project, VirtualFile root) throws VcsException { return list(project, root, false, false, null, null); } /** * List branches for the root as strings. 
* @see #list(com.intellij.openapi.project.Project, com.intellij.openapi.vfs.VirtualFile, boolean, boolean, java.util.Collection, String) */ @Nullable public static Branch listAsStrings(final Project project, final VirtualFile root, final boolean remote, final boolean local, final Collection<String> branches, @Nullable final String containingCommit) throws VcsException { final Collection<Branch> Branches = new ArrayList<Branch>(); final Branch result = list(project, root, local, remote, Branches, containingCommit); for (Branch b : Branches) { branches.add(b.getName()); } return result; } /** * List branches in the repository. Supply a Collection to this method, and it will be filled by branches. * @param project the context project * @param root the root * @param localWanted should local branches be collected. * @param remoteWanted should remote branches be collected. * @param branches the collection which will be used to store branches. * Can be null - then the method does the same as {@link #current(com.intellij.openapi.project.Project, com.intellij.openapi.vfs.VirtualFile)} * @param containingCommit show only branches which contain the specified commit. If null, no commit filtering is performed. * @return current branch. May be null if no branch is active. 
* @throws com.intellij.openapi.vcs.VcsException if there is a problem with running */ @Nullable public static Branch list(final Project project, final VirtualFile root, final boolean localWanted, final boolean remoteWanted, @Nullable final Collection<Branch> branches, @Nullable final String containingCommit) throws VcsException { // preparing native command executor final SimpleHandler handler = new SimpleHandler(project, root, Command.BRANCH); handler.setRemote(true); handler.setSilent(true); //handler.addParameters("--no-color"); if (remoteWanted && localWanted) { // handler.addParameters("-a"); } else if (remoteWanted) { // handler.addParameters("-r"); } if (containingCommit != null) { // handler.addParameters("--contains", containingCommit); } final String output = handler.run(); if (output.trim().length() == 0) { // the case after init and before first commit - there is no branch and no output, and we'll take refs/heads/master String head; try { head = new String(FileUtil.loadFileText(new File(root.getPath(), "./HEAD"), Util.UTF8_ENCODING)).trim(); final String prefix = "ref: refs/heads/"; return head.startsWith(prefix) ? new Branch(head.substring(prefix.length()), true, false) : null; } catch (IOException e) { return null; } } // standard situation. 
output example: // master //* my_feature // remotes/origin/eap // remotes/origin/feature // remotes/origin/master // also possible: //* (no branch) final String[] split = output.split("\n"); Branch currentBranch = null; String branchFilter = Vcs.getInstance(project).getAppSettings().getBranchFilter(); for (String b : split) { - if(branchFilter==null || Pattern.matches(branchFilter,b)) { + if(branchFilter==null || branchFilter.isEmpty() || Pattern.matches(branchFilter,b)) { final Branch branch = new Branch(b, false, false); //currentBranch = branch; if (branches != null) { branches.add(branch); } } } return null; } /** * Set tracked branch * * @param project the context project * @param root the root * @param remote the remote to track (null, for do not track anything, "." for local repository) * @param branch the branch to track */ public void setTrackedBranch(Project project, VirtualFile root, String remote, String branch) throws VcsException { if (remote == null || branch == null) { ConfigUtil.unsetValue(project, root, trackedRemoteKey()); ConfigUtil.unsetValue(project, root, trackedBranchKey()); } else { ConfigUtil.setValue(project, root, trackedRemoteKey(), remote); ConfigUtil.setValue(project, root, trackedBranchKey(), branch); } } /** * @return the key for the remote of the tracked branch */ private String trackedBranchKey() { return "branch." + getName() + ".merge"; } /** * @return the key for the tracked branch */ private String trackedRemoteKey() { return "branch." 
+ getName() + ".remote"; } /** * Get tracked branch for the current branch * * @param project the project * @param root the vcs root * @return the tracked branch * @throws com.intellij.openapi.vcs.VcsException if there is a problem with accessing configuration file */ @Nullable public Branch tracked(Project project, VirtualFile root) throws VcsException { String remote = getTrackedRemoteName(project, root); if (remote == null) { return null; } String branch = getTrackedBranchName(project, root); if (branch == null) { return null; } if (branch.startsWith(REFS_HEADS_PREFIX)) { branch = branch.substring(REFS_HEADS_PREFIX.length()); } boolean remoteFlag; if (!".".equals(remote)) { branch = remote + "/" + branch; remoteFlag = true; } else { remoteFlag = false; } return new Branch(branch, false, remoteFlag); } /** * Get a merge base between the current branch and specified branch. * * @param project the current project * @param root the vcs root * @param branch the branch * @return the common commit or null if the there is no common commit * @throws com.intellij.openapi.vcs.VcsException the exception */ @Nullable public RevisionNumber getMergeBase(@NotNull Project project, @NotNull VirtualFile root, @NotNull Branch branch) throws VcsException { SimpleHandler h = new SimpleHandler(project, root, Command.MERGE_BASE); h.setRemote(true); h.setSilent(true); h.addParameters(this.getFullName(), branch.getFullName()); String output = h.run().trim(); if (output.length() == 0) { return null; } else { return RevisionNumber.resolve(project, root, output); } } }
true
true
public static Branch list(final Project project, final VirtualFile root, final boolean localWanted, final boolean remoteWanted, @Nullable final Collection<Branch> branches, @Nullable final String containingCommit) throws VcsException { // preparing native command executor final SimpleHandler handler = new SimpleHandler(project, root, Command.BRANCH); handler.setRemote(true); handler.setSilent(true); //handler.addParameters("--no-color"); if (remoteWanted && localWanted) { // handler.addParameters("-a"); } else if (remoteWanted) { // handler.addParameters("-r"); } if (containingCommit != null) { // handler.addParameters("--contains", containingCommit); } final String output = handler.run(); if (output.trim().length() == 0) { // the case after init and before first commit - there is no branch and no output, and we'll take refs/heads/master String head; try { head = new String(FileUtil.loadFileText(new File(root.getPath(), "./HEAD"), Util.UTF8_ENCODING)).trim(); final String prefix = "ref: refs/heads/"; return head.startsWith(prefix) ? new Branch(head.substring(prefix.length()), true, false) : null; } catch (IOException e) { return null; } } // standard situation. output example: // master //* my_feature // remotes/origin/eap // remotes/origin/feature // remotes/origin/master // also possible: //* (no branch) final String[] split = output.split("\n"); Branch currentBranch = null; String branchFilter = Vcs.getInstance(project).getAppSettings().getBranchFilter(); for (String b : split) { if(branchFilter==null || Pattern.matches(branchFilter,b)) { final Branch branch = new Branch(b, false, false); //currentBranch = branch; if (branches != null) { branches.add(branch); } } } return null; }
public static Branch list(final Project project, final VirtualFile root, final boolean localWanted, final boolean remoteWanted, @Nullable final Collection<Branch> branches, @Nullable final String containingCommit) throws VcsException { // preparing native command executor final SimpleHandler handler = new SimpleHandler(project, root, Command.BRANCH); handler.setRemote(true); handler.setSilent(true); //handler.addParameters("--no-color"); if (remoteWanted && localWanted) { // handler.addParameters("-a"); } else if (remoteWanted) { // handler.addParameters("-r"); } if (containingCommit != null) { // handler.addParameters("--contains", containingCommit); } final String output = handler.run(); if (output.trim().length() == 0) { // the case after init and before first commit - there is no branch and no output, and we'll take refs/heads/master String head; try { head = new String(FileUtil.loadFileText(new File(root.getPath(), "./HEAD"), Util.UTF8_ENCODING)).trim(); final String prefix = "ref: refs/heads/"; return head.startsWith(prefix) ? new Branch(head.substring(prefix.length()), true, false) : null; } catch (IOException e) { return null; } } // standard situation. output example: // master //* my_feature // remotes/origin/eap // remotes/origin/feature // remotes/origin/master // also possible: //* (no branch) final String[] split = output.split("\n"); Branch currentBranch = null; String branchFilter = Vcs.getInstance(project).getAppSettings().getBranchFilter(); for (String b : split) { if(branchFilter==null || branchFilter.isEmpty() || Pattern.matches(branchFilter,b)) { final Branch branch = new Branch(b, false, false); //currentBranch = branch; if (branches != null) { branches.add(branch); } } } return null; }
diff --git a/src/it/chalmers/dat255_bearded_octo_lama/games/RocketLanderGame.java b/src/it/chalmers/dat255_bearded_octo_lama/games/RocketLanderGame.java index 9db3bde..67083b6 100644 --- a/src/it/chalmers/dat255_bearded_octo_lama/games/RocketLanderGame.java +++ b/src/it/chalmers/dat255_bearded_octo_lama/games/RocketLanderGame.java @@ -1,166 +1,168 @@ package it.chalmers.dat255_bearded_octo_lama.games; import it.chalmers.dat255_bearded_octo_lama.R; import android.content.Context; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.Canvas; import android.view.MotionEvent; import android.widget.LinearLayout; public class RocketLanderGame extends AbstractGameView { //Set all physics constants private final int GRAV_ACCEL = 100; private final int ENGINE_ACCEL = 200; private final int ENGINE_SIDE_ACCEL = 100; private final int MAX_SPD = 500; private final int INIT_SPD = 25; //Set goal constants private final int MAX_VERT_SPD = 20; private final int MAX_HORI_SPD = 15; private long lastTime; private int currentYSpd, currentXSpd; private double rocketX, rocketY; private boolean engineIsRunning; private int groundYLevel; private float pressX; private Bitmap rocketBitmap; public RocketLanderGame(Context context, LinearLayout dismissAlarmLayout) { super(context, dismissAlarmLayout); initGame(); } private void initGame() { lastTime = System.currentTimeMillis() + 100; rocketBitmap = BitmapFactory.decodeResource(context.getResources(), R.drawable.rocket); resetGame(); } private void resetGame() { rocketX = getWidth()/2; rocketY = 0; engineIsRunning = false; currentYSpd = INIT_SPD; } @Override protected void onSizeChanged(int w, int h, int oldw, int oldh) { super.onSizeChanged(w, h, oldw, oldh); //When the size of the view is changed we are going to reset the position of the //rocket and reset the ground level to avoid any issues with bad positioning. 
rocketX = w/2; rocketY = 0; groundYLevel = h/4 * 3; } @Override protected void updateGame() { long now = System.currentTimeMillis(); if(lastTime > now) { return; } double timeSinceLast = (now - lastTime)/1000.0; //Set and calculate acceleration. double xAcceleration = 0; double yAcceleration = GRAV_ACCEL * timeSinceLast; //Calculate new speed of the aircraft. if(engineIsRunning) { //Add engine acceleration. yAcceleration -= ENGINE_ACCEL * timeSinceLast; //Check if the player touches the screen on the left or right side of the rocket. if(pressX > rocketX) { xAcceleration -= ENGINE_SIDE_ACCEL * timeSinceLast; } else { xAcceleration += ENGINE_SIDE_ACCEL * timeSinceLast; } } currentXSpd += xAcceleration * timeSinceLast; currentYSpd += yAcceleration * timeSinceLast; if(currentYSpd > MAX_SPD) { currentYSpd = MAX_SPD; } rocketX += (currentXSpd * timeSinceLast); + //If the rocket goes outside the view we make it appear on the other side + rocketX = (rocketX < 0) ? getWidth() : rocketX % getWidth(); rocketY += (currentYSpd * timeSinceLast); //Check if aircraft has landed or crashed. if(rocketY >= groundYLevel) { //Check if it's a crash. - if(currentYSpd > MAX_VERT_SPD) { + if(currentYSpd > MAX_VERT_SPD || currentXSpd > MAX_HORI_SPD) { resetGame(); } else { //If it's not a crash, end the game. endGame(); } } lastTime = now; } @Override protected void updateGraphics(Canvas c) { float canvasWidth = getWidth(); float canvasHeight = getHeight(); // Paint heaven then ground. 
painter.setARGB(100, 51, 204, 255); c.drawRect(0, 0, canvasWidth, canvasHeight, painter); painter.setARGB(100, 102, 0, 0); c.drawRect(0, groundYLevel, canvasWidth, canvasHeight, painter); //Draw the rocket c.drawBitmap(rocketBitmap, (float)(rocketX - rocketBitmap.getWidth()/2), (float)(rocketY - rocketBitmap.getHeight()), painter); if(engineIsRunning) { painter.setARGB(100, 255, 100, 0); c.drawCircle((float)rocketX, (float)rocketY-10, 10, painter); } } @Override public boolean onTouchEvent(MotionEvent event) { //Sleep a bit to not overload the system with unnecessary amount of data. try { Thread.sleep(50); } catch (InterruptedException e) { e.printStackTrace(); } //Check for input. switch(event.getAction()) { case MotionEvent.ACTION_DOWN: engineIsRunning = true; pressX = event.getX(); break; case MotionEvent.ACTION_MOVE: pressX = event.getX(); break; case MotionEvent.ACTION_UP: engineIsRunning = false; break; } return true; } }
false
true
protected void updateGame() { long now = System.currentTimeMillis(); if(lastTime > now) { return; } double timeSinceLast = (now - lastTime)/1000.0; //Set and calculate acceleration. double xAcceleration = 0; double yAcceleration = GRAV_ACCEL * timeSinceLast; //Calculate new speed of the aircraft. if(engineIsRunning) { //Add engine acceleration. yAcceleration -= ENGINE_ACCEL * timeSinceLast; //Check if the player touches the screen on the left or right side of the rocket. if(pressX > rocketX) { xAcceleration -= ENGINE_SIDE_ACCEL * timeSinceLast; } else { xAcceleration += ENGINE_SIDE_ACCEL * timeSinceLast; } } currentXSpd += xAcceleration * timeSinceLast; currentYSpd += yAcceleration * timeSinceLast; if(currentYSpd > MAX_SPD) { currentYSpd = MAX_SPD; } rocketX += (currentXSpd * timeSinceLast); rocketY += (currentYSpd * timeSinceLast); //Check if aircraft has landed or crashed. if(rocketY >= groundYLevel) { //Check if it's a crash. if(currentYSpd > MAX_VERT_SPD) { resetGame(); } else { //If it's not a crash, end the game. endGame(); } } lastTime = now; }
protected void updateGame() { long now = System.currentTimeMillis(); if(lastTime > now) { return; } double timeSinceLast = (now - lastTime)/1000.0; //Set and calculate acceleration. double xAcceleration = 0; double yAcceleration = GRAV_ACCEL * timeSinceLast; //Calculate new speed of the aircraft. if(engineIsRunning) { //Add engine acceleration. yAcceleration -= ENGINE_ACCEL * timeSinceLast; //Check if the player touches the screen on the left or right side of the rocket. if(pressX > rocketX) { xAcceleration -= ENGINE_SIDE_ACCEL * timeSinceLast; } else { xAcceleration += ENGINE_SIDE_ACCEL * timeSinceLast; } } currentXSpd += xAcceleration * timeSinceLast; currentYSpd += yAcceleration * timeSinceLast; if(currentYSpd > MAX_SPD) { currentYSpd = MAX_SPD; } rocketX += (currentXSpd * timeSinceLast); //If the rocket goes outside the view we make it appear on the other side rocketX = (rocketX < 0) ? getWidth() : rocketX % getWidth(); rocketY += (currentYSpd * timeSinceLast); //Check if aircraft has landed or crashed. if(rocketY >= groundYLevel) { //Check if it's a crash. if(currentYSpd > MAX_VERT_SPD || currentXSpd > MAX_HORI_SPD) { resetGame(); } else { //If it's not a crash, end the game. endGame(); } } lastTime = now; }
diff --git a/pn-dispatcher/src/main/java/info/papyri/dispatch/BiblioSearch.java b/pn-dispatcher/src/main/java/info/papyri/dispatch/BiblioSearch.java index 5d9524ce..7cacfa84 100644 --- a/pn-dispatcher/src/main/java/info/papyri/dispatch/BiblioSearch.java +++ b/pn-dispatcher/src/main/java/info/papyri/dispatch/BiblioSearch.java @@ -1,190 +1,190 @@ /* * To change this template, choose Tools | Templates * and open the template in the editor. */ package info.papyri.dispatch; import java.io.IOException; import java.io.PrintWriter; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.BufferedReader; import java.io.InputStreamReader; import java.net.URL; import java.net.URLEncoder; import java.net.MalformedURLException; import javax.servlet.ServletConfig; import org.apache.solr.client.solrj.SolrServer; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.CommonsHttpSolrServer; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.client.solrj.request.QueryRequest; import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocumentList; import org.apache.solr.client.solrj.SolrRequest.METHOD; /** * * @author hcayless */ public class BiblioSearch extends HttpServlet { private String solrUrl; private URL searchURL; private String xmlPath = ""; private String htmlPath = ""; private String home = ""; private FileUtils util; private SolrUtils solrutil; private static String BiblioSearch = "biblio-search/"; @Override public void init(ServletConfig config) throws ServletException { super.init(config); solrUrl = config.getInitParameter("solrUrl"); xmlPath = config.getInitParameter("xmlPath"); htmlPath = config.getInitParameter("htmlPath"); home = config.getInitParameter("home"); util = new FileUtils(xmlPath, 
htmlPath); solrutil = new SolrUtils(config); try { searchURL = new URL("file://" + home + "/" + "bibliosearch.html"); } catch (MalformedURLException e) { throw new ServletException(e); } } /** * Processes requests for both HTTP <code>GET</code> and <code>POST</code> methods. * @param request servlet request * @param response servlet response * @throws ServletException if a servlet-specific error occurs * @throws IOException if an I/O error occurs */ protected void processRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { response.setContentType("text/html;charset=UTF-8"); PrintWriter out = response.getWriter(); BufferedReader reader = null; try { String q = request.getParameter("q"); reader = new BufferedReader(new InputStreamReader(searchURL.openStream())); String line = ""; while ((line = reader.readLine()) != null) { if (line.contains("<!-- Results -->") && !("".equals(q) || q == null)) { SolrServer solr = new CommonsHttpSolrServer(solrUrl + BiblioSearch); int rows = 30; try { rows = Integer.parseInt(request.getParameter("rows")); } catch (Exception e) { } int start = 0; try { start = Integer.parseInt(request.getParameter("start")); } catch (Exception e) {} SolrQuery sq = new SolrQuery(); try { - sq.setQuery(q + "AND NOT id:http*"); + sq.setQuery(q); sq.setStart(start); sq.setRows(rows); sq.addSortField("date", SolrQuery.ORDER.asc); sq.addSortField("sort", SolrQuery.ORDER.asc); QueryRequest req = new QueryRequest(sq); req.setMethod(METHOD.POST); QueryResponse rs = req.process(solr); SolrDocumentList docs = rs.getResults(); out.println("<p>" + docs.getNumFound() + " hits on \"" + q.toString() + "\".</p>"); out.println("<table>"); String uq = q; try { uq = URLEncoder.encode(q, "UTF-8"); } catch (Exception e) { } for (SolrDocument doc : docs) { StringBuilder row = new StringBuilder("<tr class=\"result-record\"><td>"); row.append("<a href=\""); row.append("/biblio/"); row.append(((String) 
doc.getFieldValue("id"))); row.append("/?q="); row.append(uq); row.append("\">"); row.append(doc.getFieldValue("display")); row.append("</a>"); row.append("</td>"); row.append("</tr>"); out.print(row); } out.println("</table>"); if (docs.getNumFound() > rows) { out.println("<div id=\"pagination\">"); int pages = (int) Math.ceil((double) docs.getNumFound() / (double) rows); int p = 0; while (p < pages) { if ((p * rows) == start) { out.print("<div class=\"page current\">"); out.print((p + 1) + " "); out.print("</div>"); } else { StringBuilder plink = new StringBuilder(uq + "&start=" + p * rows + "&rows=" + rows); out.print("<div class=\"page\"><a href=\"/bibliosearch?q=" + plink + "\">" + (p + 1) + "</a></div>"); } p++; } out.println("</div>"); } } catch (SolrServerException e) { out.println("<p>Unable to execute query. Please try again.</p>"); throw new ServletException(e); } } else { out.println(line); } } } finally { out.close(); } } // <editor-fold defaultstate="collapsed" desc="HttpServlet methods. Click on the + sign on the left to edit the code."> /** * Handles the HTTP <code>GET</code> method. * @param request servlet request * @param response servlet response * @throws ServletException if a servlet-specific error occurs * @throws IOException if an I/O error occurs */ @Override protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { processRequest(request, response); } /** * Handles the HTTP <code>POST</code> method. * @param request servlet request * @param response servlet response * @throws ServletException if a servlet-specific error occurs * @throws IOException if an I/O error occurs */ @Override protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { processRequest(request, response); } /** * Returns a short description of the servlet. 
* @return a String containing servlet description */ @Override public String getServletInfo() { return "Short description"; }// </editor-fold> }
true
true
protected void processRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { response.setContentType("text/html;charset=UTF-8"); PrintWriter out = response.getWriter(); BufferedReader reader = null; try { String q = request.getParameter("q"); reader = new BufferedReader(new InputStreamReader(searchURL.openStream())); String line = ""; while ((line = reader.readLine()) != null) { if (line.contains("<!-- Results -->") && !("".equals(q) || q == null)) { SolrServer solr = new CommonsHttpSolrServer(solrUrl + BiblioSearch); int rows = 30; try { rows = Integer.parseInt(request.getParameter("rows")); } catch (Exception e) { } int start = 0; try { start = Integer.parseInt(request.getParameter("start")); } catch (Exception e) {} SolrQuery sq = new SolrQuery(); try { sq.setQuery(q + "AND NOT id:http*"); sq.setStart(start); sq.setRows(rows); sq.addSortField("date", SolrQuery.ORDER.asc); sq.addSortField("sort", SolrQuery.ORDER.asc); QueryRequest req = new QueryRequest(sq); req.setMethod(METHOD.POST); QueryResponse rs = req.process(solr); SolrDocumentList docs = rs.getResults(); out.println("<p>" + docs.getNumFound() + " hits on \"" + q.toString() + "\".</p>"); out.println("<table>"); String uq = q; try { uq = URLEncoder.encode(q, "UTF-8"); } catch (Exception e) { } for (SolrDocument doc : docs) { StringBuilder row = new StringBuilder("<tr class=\"result-record\"><td>"); row.append("<a href=\""); row.append("/biblio/"); row.append(((String) doc.getFieldValue("id"))); row.append("/?q="); row.append(uq); row.append("\">"); row.append(doc.getFieldValue("display")); row.append("</a>"); row.append("</td>"); row.append("</tr>"); out.print(row); } out.println("</table>"); if (docs.getNumFound() > rows) { out.println("<div id=\"pagination\">"); int pages = (int) Math.ceil((double) docs.getNumFound() / (double) rows); int p = 0; while (p < pages) { if ((p * rows) == start) { out.print("<div class=\"page current\">"); out.print((p + 1) + " 
"); out.print("</div>"); } else { StringBuilder plink = new StringBuilder(uq + "&start=" + p * rows + "&rows=" + rows); out.print("<div class=\"page\"><a href=\"/bibliosearch?q=" + plink + "\">" + (p + 1) + "</a></div>"); } p++; } out.println("</div>"); } } catch (SolrServerException e) { out.println("<p>Unable to execute query. Please try again.</p>"); throw new ServletException(e); } } else { out.println(line); } } } finally { out.close(); } }
protected void processRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { response.setContentType("text/html;charset=UTF-8"); PrintWriter out = response.getWriter(); BufferedReader reader = null; try { String q = request.getParameter("q"); reader = new BufferedReader(new InputStreamReader(searchURL.openStream())); String line = ""; while ((line = reader.readLine()) != null) { if (line.contains("<!-- Results -->") && !("".equals(q) || q == null)) { SolrServer solr = new CommonsHttpSolrServer(solrUrl + BiblioSearch); int rows = 30; try { rows = Integer.parseInt(request.getParameter("rows")); } catch (Exception e) { } int start = 0; try { start = Integer.parseInt(request.getParameter("start")); } catch (Exception e) {} SolrQuery sq = new SolrQuery(); try { sq.setQuery(q); sq.setStart(start); sq.setRows(rows); sq.addSortField("date", SolrQuery.ORDER.asc); sq.addSortField("sort", SolrQuery.ORDER.asc); QueryRequest req = new QueryRequest(sq); req.setMethod(METHOD.POST); QueryResponse rs = req.process(solr); SolrDocumentList docs = rs.getResults(); out.println("<p>" + docs.getNumFound() + " hits on \"" + q.toString() + "\".</p>"); out.println("<table>"); String uq = q; try { uq = URLEncoder.encode(q, "UTF-8"); } catch (Exception e) { } for (SolrDocument doc : docs) { StringBuilder row = new StringBuilder("<tr class=\"result-record\"><td>"); row.append("<a href=\""); row.append("/biblio/"); row.append(((String) doc.getFieldValue("id"))); row.append("/?q="); row.append(uq); row.append("\">"); row.append(doc.getFieldValue("display")); row.append("</a>"); row.append("</td>"); row.append("</tr>"); out.print(row); } out.println("</table>"); if (docs.getNumFound() > rows) { out.println("<div id=\"pagination\">"); int pages = (int) Math.ceil((double) docs.getNumFound() / (double) rows); int p = 0; while (p < pages) { if ((p * rows) == start) { out.print("<div class=\"page current\">"); out.print((p + 1) + " "); 
out.print("</div>"); } else { StringBuilder plink = new StringBuilder(uq + "&start=" + p * rows + "&rows=" + rows); out.print("<div class=\"page\"><a href=\"/bibliosearch?q=" + plink + "\">" + (p + 1) + "</a></div>"); } p++; } out.println("</div>"); } } catch (SolrServerException e) { out.println("<p>Unable to execute query. Please try again.</p>"); throw new ServletException(e); } } else { out.println(line); } } } finally { out.close(); } }
diff --git a/cyklotron-ui/src/main/java/net/cyklotron/cms/modules/actions/poll/SendVote.java b/cyklotron-ui/src/main/java/net/cyklotron/cms/modules/actions/poll/SendVote.java index 1f0f31c02..80c00795b 100644 --- a/cyklotron-ui/src/main/java/net/cyklotron/cms/modules/actions/poll/SendVote.java +++ b/cyklotron-ui/src/main/java/net/cyklotron/cms/modules/actions/poll/SendVote.java @@ -1,173 +1,172 @@ package net.cyklotron.cms.modules.actions.poll; import java.util.HashMap; import java.util.Map; import java.util.Set; import javax.servlet.http.Cookie; import javax.servlet.http.HttpSession; import org.jcontainer.dna.Logger; import org.objectledge.context.Context; import org.objectledge.coral.security.Subject; import org.objectledge.coral.session.CoralSession; import org.objectledge.coral.store.Resource; import org.objectledge.i18n.I18nContext; import org.objectledge.parameters.Parameters; import org.objectledge.parameters.RequestParameters; import org.objectledge.pipeline.ProcessingException; import org.objectledge.templating.Template; import org.objectledge.templating.TemplatingContext; import org.objectledge.utils.StackTrace; import org.objectledge.web.HttpContext; import org.objectledge.web.captcha.CaptchaService; import org.objectledge.web.mvc.MVCContext; import net.cyklotron.cms.CmsData; import net.cyklotron.cms.CmsDataFactory; import net.cyklotron.cms.confirmation.EmailConfirmationService; import net.cyklotron.cms.documents.LinkRenderer; import net.cyklotron.cms.poll.AnswerResource; import net.cyklotron.cms.poll.PollService; import net.cyklotron.cms.poll.VoteResource; import net.cyklotron.cms.poll.VoteResourceImpl; import net.cyklotron.cms.structure.StructureService; import net.cyklotron.cms.util.OfflineLinkRenderingService; import net.cyklotron.cms.workflow.WorkflowService; /** * @author <a href="mailo:[email protected]">Pawel Potempski</a> * @version $Id: RespondPoll.java,v 1.7 2007-02-25 14:14:49 pablo Exp $ */ public class SendVote extends BasePollAction { 
private EmailConfirmationService emailConfirmationRequestService; private CaptchaService captchaService; private final OfflineLinkRenderingService linkRenderingService; public SendVote(Logger logger, StructureService structureService, CmsDataFactory cmsDataFactory, PollService pollService, WorkflowService workflowService, EmailConfirmationService emailConfirmationRequestService, CaptchaService captchaService, OfflineLinkRenderingService linkRenderingService) { super(logger, structureService, cmsDataFactory, pollService, workflowService); this.emailConfirmationRequestService = emailConfirmationRequestService; this.captchaService = captchaService; this.linkRenderingService = linkRenderingService; } /** * Performs the action. */ public void execute(Context context, Parameters parameters, MVCContext mvcContext, TemplatingContext templatingContext, HttpContext httpContext, CoralSession coralSession) throws ProcessingException { HttpSession session = httpContext.getRequest().getSession(); CmsData cmsData = cmsDataFactory.getCmsData(context); Parameters screenConfig = cmsData.getEmbeddedScreenConfig(); if(session == null || session.isNew()) { templatingContext.put("result", "new_session"); return; } Subject subject = coralSession.getUserSubject(); int vid = parameters.getInt("vid", -1); if(vid == -1) { throw new ProcessingException("Vote id not found"); } Long answerId = parameters.getLong("answer", -1); if(answerId == -1) { templatingContext.put("result", "answer_not_found"); return; } String email = parameters.get("email", ""); if(!email.matches("[a-zA-Z0-9.-_]+@[a-zA-Z0-9.-_]+.[a-zA-Z]{1,4}")) { templatingContext.put("result", "invalid_email"); return; } if(screenConfig.getBoolean("add_captcha", false) && !captchaService.checkCaptcha(httpContext, (RequestParameters)parameters)) { templatingContext.put("result", "invalid_captcha_verification"); return; } try { VoteResource voteResource = VoteResourceImpl.getVoteResource(coralSession, vid); Set<String> voteEmails = 
pollService.getBallotsEmails(coralSession, voteResource); if(pollService.hasVoted(httpContext, templatingContext, voteResource) || voteEmails.contains(email)) { templatingContext.put("already_voted", Boolean.TRUE); templatingContext.put("result", "already_responded"); return; } Resource[] answersResources = coralSession.getStore().getResource(voteResource); for(int i = 0; i < answersResources.length; i++) { AnswerResource answerResource = (AnswerResource)answersResources[i]; if(answerId.equals(answerResource.getId())) { String confirmationRequest = emailConfirmationRequestService .createEmailConfirmationRequest(coralSession, email, answerId.toString()); I18nContext i18nContext = I18nContext.getI18nContext(context); Template template = pollService.getVoteConfiramationTicketTemplate( voteResource, i18nContext.getLocale()); LinkRenderer linkRenderer = linkRenderingService.getLinkRenderer(); Map<String, Object> entries = new HashMap<String, Object>(); entries.put("vote", voteResource); emailConfirmationRequestService.sendConfirmationRequest(confirmationRequest, voteResource.getSenderAddress(), email, entries, cmsData.getNode(), template, "PLAIN", linkRenderer, coralSession); setCookie(httpContext, vid, answerId); break; } } } catch(Exception e) { templatingContext.put("result", "exception"); - templatingContext.put("trace", new StackTrace(e)); logger.error("Exception in poll,SendVote action", e); return; } templatingContext.put("result", "responded_successfully"); templatingContext.put("already_voted", Boolean.TRUE); } private void setCookie(HttpContext httpContext, Integer vid, Long answerId) { String cookieKey = "vote_" + vid; Cookie cookie = new Cookie(cookieKey, answerId.toString()); cookie.setMaxAge(30 * 24 * 3600); cookie.setPath("/"); httpContext.getResponse().addCookie(cookie); } public boolean checkAccessRights(Context context) throws ProcessingException { CmsData cmsData = cmsDataFactory.getCmsData(context); if(!cmsData.isApplicationEnabled("poll")) { 
logger.debug("Application 'poll' not enabled in site"); return false; } return true; } }
true
true
public void execute(Context context, Parameters parameters, MVCContext mvcContext, TemplatingContext templatingContext, HttpContext httpContext, CoralSession coralSession) throws ProcessingException { HttpSession session = httpContext.getRequest().getSession(); CmsData cmsData = cmsDataFactory.getCmsData(context); Parameters screenConfig = cmsData.getEmbeddedScreenConfig(); if(session == null || session.isNew()) { templatingContext.put("result", "new_session"); return; } Subject subject = coralSession.getUserSubject(); int vid = parameters.getInt("vid", -1); if(vid == -1) { throw new ProcessingException("Vote id not found"); } Long answerId = parameters.getLong("answer", -1); if(answerId == -1) { templatingContext.put("result", "answer_not_found"); return; } String email = parameters.get("email", ""); if(!email.matches("[a-zA-Z0-9.-_]+@[a-zA-Z0-9.-_]+.[a-zA-Z]{1,4}")) { templatingContext.put("result", "invalid_email"); return; } if(screenConfig.getBoolean("add_captcha", false) && !captchaService.checkCaptcha(httpContext, (RequestParameters)parameters)) { templatingContext.put("result", "invalid_captcha_verification"); return; } try { VoteResource voteResource = VoteResourceImpl.getVoteResource(coralSession, vid); Set<String> voteEmails = pollService.getBallotsEmails(coralSession, voteResource); if(pollService.hasVoted(httpContext, templatingContext, voteResource) || voteEmails.contains(email)) { templatingContext.put("already_voted", Boolean.TRUE); templatingContext.put("result", "already_responded"); return; } Resource[] answersResources = coralSession.getStore().getResource(voteResource); for(int i = 0; i < answersResources.length; i++) { AnswerResource answerResource = (AnswerResource)answersResources[i]; if(answerId.equals(answerResource.getId())) { String confirmationRequest = emailConfirmationRequestService .createEmailConfirmationRequest(coralSession, email, answerId.toString()); I18nContext i18nContext = I18nContext.getI18nContext(context); Template 
template = pollService.getVoteConfiramationTicketTemplate( voteResource, i18nContext.getLocale()); LinkRenderer linkRenderer = linkRenderingService.getLinkRenderer(); Map<String, Object> entries = new HashMap<String, Object>(); entries.put("vote", voteResource); emailConfirmationRequestService.sendConfirmationRequest(confirmationRequest, voteResource.getSenderAddress(), email, entries, cmsData.getNode(), template, "PLAIN", linkRenderer, coralSession); setCookie(httpContext, vid, answerId); break; } } } catch(Exception e) { templatingContext.put("result", "exception"); templatingContext.put("trace", new StackTrace(e)); logger.error("Exception in poll,SendVote action", e); return; } templatingContext.put("result", "responded_successfully"); templatingContext.put("already_voted", Boolean.TRUE); }
public void execute(Context context, Parameters parameters, MVCContext mvcContext, TemplatingContext templatingContext, HttpContext httpContext, CoralSession coralSession) throws ProcessingException { HttpSession session = httpContext.getRequest().getSession(); CmsData cmsData = cmsDataFactory.getCmsData(context); Parameters screenConfig = cmsData.getEmbeddedScreenConfig(); if(session == null || session.isNew()) { templatingContext.put("result", "new_session"); return; } Subject subject = coralSession.getUserSubject(); int vid = parameters.getInt("vid", -1); if(vid == -1) { throw new ProcessingException("Vote id not found"); } Long answerId = parameters.getLong("answer", -1); if(answerId == -1) { templatingContext.put("result", "answer_not_found"); return; } String email = parameters.get("email", ""); if(!email.matches("[a-zA-Z0-9.-_]+@[a-zA-Z0-9.-_]+.[a-zA-Z]{1,4}")) { templatingContext.put("result", "invalid_email"); return; } if(screenConfig.getBoolean("add_captcha", false) && !captchaService.checkCaptcha(httpContext, (RequestParameters)parameters)) { templatingContext.put("result", "invalid_captcha_verification"); return; } try { VoteResource voteResource = VoteResourceImpl.getVoteResource(coralSession, vid); Set<String> voteEmails = pollService.getBallotsEmails(coralSession, voteResource); if(pollService.hasVoted(httpContext, templatingContext, voteResource) || voteEmails.contains(email)) { templatingContext.put("already_voted", Boolean.TRUE); templatingContext.put("result", "already_responded"); return; } Resource[] answersResources = coralSession.getStore().getResource(voteResource); for(int i = 0; i < answersResources.length; i++) { AnswerResource answerResource = (AnswerResource)answersResources[i]; if(answerId.equals(answerResource.getId())) { String confirmationRequest = emailConfirmationRequestService .createEmailConfirmationRequest(coralSession, email, answerId.toString()); I18nContext i18nContext = I18nContext.getI18nContext(context); Template 
template = pollService.getVoteConfiramationTicketTemplate( voteResource, i18nContext.getLocale()); LinkRenderer linkRenderer = linkRenderingService.getLinkRenderer(); Map<String, Object> entries = new HashMap<String, Object>(); entries.put("vote", voteResource); emailConfirmationRequestService.sendConfirmationRequest(confirmationRequest, voteResource.getSenderAddress(), email, entries, cmsData.getNode(), template, "PLAIN", linkRenderer, coralSession); setCookie(httpContext, vid, answerId); break; } } } catch(Exception e) { templatingContext.put("result", "exception"); logger.error("Exception in poll,SendVote action", e); return; } templatingContext.put("result", "responded_successfully"); templatingContext.put("already_voted", Boolean.TRUE); }
diff --git a/src/org/nbphpcouncil/modules/php/yii/YiiPhpModuleExtender.java b/src/org/nbphpcouncil/modules/php/yii/YiiPhpModuleExtender.java index 7bff558..974b8fe 100644 --- a/src/org/nbphpcouncil/modules/php/yii/YiiPhpModuleExtender.java +++ b/src/org/nbphpcouncil/modules/php/yii/YiiPhpModuleExtender.java @@ -1,109 +1,115 @@ /* * add license */ package org.nbphpcouncil.modules.php.yii; import java.io.IOException; import java.util.HashSet; import java.util.Set; import java.util.logging.Logger; import javax.swing.JComponent; import javax.swing.event.ChangeListener; import org.nbphpcouncil.modules.php.yii.commands.YiiScript; import org.nbphpcouncil.modules.php.yii.ui.options.YiiOptions; import org.nbphpcouncil.modules.php.yii.ui.wizards.NewProjectConfigurationPanel; import org.nbphpcouncil.modules.php.yii.util.ProjectPropertiesSupport; import org.nbphpcouncil.modules.php.yii.util.YiiUtils; import org.netbeans.modules.php.api.executable.InvalidPhpExecutableException; import org.netbeans.modules.php.api.phpmodule.PhpModule; import org.netbeans.modules.php.spi.framework.PhpModuleExtender; import org.openide.filesystems.FileObject; import org.openide.util.Exceptions; import org.openide.util.HelpCtx; /** * * @author junichi11 */ public class YiiPhpModuleExtender extends PhpModuleExtender { private NewProjectConfigurationPanel panel; private static final Logger LOGGER = Logger.getLogger(YiiPhpModuleExtender.class.getName()); @Override public void addChangeListener(ChangeListener listener) { } @Override public void removeChangeListener(ChangeListener listener) { } @Override public JComponent getComponent() { return getPanel(); } @Override public HelpCtx getHelp() { return HelpCtx.DEFAULT_HELP; } @Override public boolean isValid() { String yiic = YiiOptions.getInstance().getYiiScript(); if (yiic == null || yiic.isEmpty()) { return false; } return true; } @Override public String getErrorMessage() { return null; } @Override public String getWarningMessage() { return null; } 
@Override public Set<FileObject> extend(PhpModule phpModule) throws ExtendingException { boolean isSuccess = false; try { YiiScript script = YiiScript.forPhpModule(phpModule, true); isSuccess = script.initProject(phpModule); } catch (InvalidPhpExecutableException ex) { Exceptions.printStackTrace(ex); } boolean usePHPUnit = panel.usePHPUnit(); HashSet<FileObject> files = new HashSet<FileObject>(); if (isSuccess) { // set PHPUnit Test if (usePHPUnit) { ProjectPropertiesSupport.setPHPUnit(phpModule); } try { YiiUtils.createCodeCompletionFile(phpModule); } catch (IOException ex) { Exceptions.printStackTrace(ex); } - FileObject projectDirectory = phpModule.getProjectDirectory(); + FileObject sourceDirectory = phpModule.getSourceDirectory(); FileObject config = null; - if (projectDirectory != null) { - config = projectDirectory.getFileObject("protected/config/main.php"); // NOI18N + FileObject index = null; + if (sourceDirectory != null) { + sourceDirectory.refresh(); + index = sourceDirectory.getFileObject("index.php"); // NOI18N + config = sourceDirectory.getFileObject("protected/config/main.php"); // NOI18N + } + if (index != null) { + files.add(index); } if (config != null) { files.add(config); } } return files; } private synchronized NewProjectConfigurationPanel getPanel() { if (panel == null) { panel = new NewProjectConfigurationPanel(); } return panel; } }
false
true
public Set<FileObject> extend(PhpModule phpModule) throws ExtendingException { boolean isSuccess = false; try { YiiScript script = YiiScript.forPhpModule(phpModule, true); isSuccess = script.initProject(phpModule); } catch (InvalidPhpExecutableException ex) { Exceptions.printStackTrace(ex); } boolean usePHPUnit = panel.usePHPUnit(); HashSet<FileObject> files = new HashSet<FileObject>(); if (isSuccess) { // set PHPUnit Test if (usePHPUnit) { ProjectPropertiesSupport.setPHPUnit(phpModule); } try { YiiUtils.createCodeCompletionFile(phpModule); } catch (IOException ex) { Exceptions.printStackTrace(ex); } FileObject projectDirectory = phpModule.getProjectDirectory(); FileObject config = null; if (projectDirectory != null) { config = projectDirectory.getFileObject("protected/config/main.php"); // NOI18N } if (config != null) { files.add(config); } } return files; }
public Set<FileObject> extend(PhpModule phpModule) throws ExtendingException { boolean isSuccess = false; try { YiiScript script = YiiScript.forPhpModule(phpModule, true); isSuccess = script.initProject(phpModule); } catch (InvalidPhpExecutableException ex) { Exceptions.printStackTrace(ex); } boolean usePHPUnit = panel.usePHPUnit(); HashSet<FileObject> files = new HashSet<FileObject>(); if (isSuccess) { // set PHPUnit Test if (usePHPUnit) { ProjectPropertiesSupport.setPHPUnit(phpModule); } try { YiiUtils.createCodeCompletionFile(phpModule); } catch (IOException ex) { Exceptions.printStackTrace(ex); } FileObject sourceDirectory = phpModule.getSourceDirectory(); FileObject config = null; FileObject index = null; if (sourceDirectory != null) { sourceDirectory.refresh(); index = sourceDirectory.getFileObject("index.php"); // NOI18N config = sourceDirectory.getFileObject("protected/config/main.php"); // NOI18N } if (index != null) { files.add(index); } if (config != null) { files.add(config); } } return files; }
diff --git a/org/postgresql/jdbc2/DatabaseMetaData.java b/org/postgresql/jdbc2/DatabaseMetaData.java index 3bde0b1..f418d62 100644 --- a/org/postgresql/jdbc2/DatabaseMetaData.java +++ b/org/postgresql/jdbc2/DatabaseMetaData.java @@ -1,3247 +1,3247 @@ package org.postgresql.jdbc2; // IMPORTANT NOTE: This file implements the JDBC 2 version of the driver. // If you make any modifications to this file, you must make sure that the // changes are also made (if relevent) to the related JDBC 1 class in the // org.postgresql.jdbc1 package. import java.sql.*; import java.util.*; import org.postgresql.Driver; import org.postgresql.Field; import org.postgresql.util.PSQLException; /* * This class provides information about the database as a whole. * * $Id$ * * <p>Many of the methods here return lists of information in ResultSets. You * can use the normal ResultSet methods such as getString and getInt to * retrieve the data from these ResultSets. If a given form of metadata is * not available, these methods should throw a SQLException. * * <p>Some of these methods take arguments that are String patterns. These * arguments all have names such as fooPattern. Within a pattern String, * "%" means match any substring of 0 or more characters, and "_" means * match any one character. Only metadata entries matching the search * pattern are returned. if a search pattern argument is set to a null * ref, it means that argument's criteria should be dropped from the * search. * * <p>A SQLException will be throws if a driver does not support a meta * data method. In the case of methods that return a ResultSet, either * a ResultSet (which may be empty) is returned or a SQLException is * thrown. * * @see java.sql.DatabaseMetaData */ public class DatabaseMetaData implements java.sql.DatabaseMetaData { Connection connection; // The connection association // These define various OID's. Hopefully they will stay constant. 
static final int iVarcharOid = 1043; // OID for varchar static final int iBoolOid = 16; // OID for bool static final int iInt2Oid = 21; // OID for int2 static final int iInt4Oid = 23; // OID for int4 static final int VARHDRSZ = 4; // length for int4 public DatabaseMetaData(Connection conn) { this.connection = conn; } /* * Can all the procedures returned by getProcedures be called * by the current user? * * @return true if so * @exception SQLException if a database access error occurs */ public boolean allProceduresAreCallable() throws SQLException { if (Driver.logDebug) Driver.debug("allProceduresAreCallable"); return true; // For now... } /* * Can all the tables returned by getTable be SELECTed by * the current user? * * @return true if so * @exception SQLException if a database access error occurs */ public boolean allTablesAreSelectable() throws SQLException { if (Driver.logDebug) Driver.debug("allTablesAreSelectable"); return true; // For now... } /* * What is the URL for this database? * * @return the url or null if it cannott be generated * @exception SQLException if a database access error occurs */ public String getURL() throws SQLException { String url = connection.getURL(); if (Driver.logDebug) Driver.debug("getURL " + url); return url; } /* * What is our user name as known to the database? * * @return our database user name * @exception SQLException if a database access error occurs */ public String getUserName() throws SQLException { String userName = connection.getUserName(); if (Driver.logDebug) Driver.debug("getUserName " + userName); return userName; } /* * Is the database in read-only mode? * * @return true if so * @exception SQLException if a database access error occurs */ public boolean isReadOnly() throws SQLException { boolean isReadOnly = connection.isReadOnly(); if (Driver.logDebug) Driver.debug("isReadOnly " + isReadOnly); return isReadOnly; } /* * Are NULL values sorted high? 
* * @return true if so * @exception SQLException if a database access error occurs */ public boolean nullsAreSortedHigh() throws SQLException { boolean nullSortedHigh = connection.haveMinimumServerVersion("7.2"); if (Driver.logDebug) Driver.debug("nullsAreSortedHigh " + nullSortedHigh); return nullSortedHigh; } /* * Are NULL values sorted low? * * @return true if so * @exception SQLException if a database access error occurs */ public boolean nullsAreSortedLow() throws SQLException { if (Driver.logDebug) Driver.debug("nullsAreSortedLow false"); return false; } /* * Are NULL values sorted at the start regardless of sort order? * * @return true if so * @exception SQLException if a database access error occurs */ public boolean nullsAreSortedAtStart() throws SQLException { if (Driver.logDebug) Driver.debug("nullsAreSortedAtStart false"); return false; } /* * Are NULL values sorted at the end regardless of sort order? * * @return true if so * @exception SQLException if a database access error occurs */ public boolean nullsAreSortedAtEnd() throws SQLException { boolean nullsAreSortedAtEnd = ! connection.haveMinimumServerVersion("7.2"); if (Driver.logDebug) Driver.debug("nullsAreSortedAtEnd " + nullsAreSortedAtEnd); return nullsAreSortedAtEnd; } /* * What is the name of this database product - we hope that it is * PostgreSQL, so we return that explicitly. * * @return the database product name * @exception SQLException if a database access error occurs */ public String getDatabaseProductName() throws SQLException { if (Driver.logDebug) Driver.debug("getDatabaseProductName PostgresSQL"); return "PostgreSQL"; } /* * What is the version of this database product. 
* * @return the database version * @exception SQLException if a database access error occurs */ public String getDatabaseProductVersion() throws SQLException { String versionNumber = connection.getDBVersionNumber(); if (Driver.logDebug) Driver.debug("getDatabaseProductVersion " + versionNumber); return versionNumber; } /* * What is the name of this JDBC driver? If we don't know this * we are doing something wrong! * * @return the JDBC driver name * @exception SQLException why? */ public String getDriverName() throws SQLException { String driverName = "PostgreSQL Native Driver"; if (Driver.logDebug) Driver.debug("getDriverName" + driverName); return driverName; } /* * What is the version string of this JDBC driver? Again, this is * static. * * @return the JDBC driver name. * @exception SQLException why? */ public String getDriverVersion() throws SQLException { String driverVersion = connection.this_driver.getVersion(); if (Driver.logDebug) Driver.debug("getDriverVersion " + driverVersion); return driverVersion; } /* * What is this JDBC driver's major version number? * * @return the JDBC driver major version */ public int getDriverMajorVersion() { int majorVersion = connection.this_driver.getMajorVersion(); if (Driver.logDebug) Driver.debug("getMajorVersion " + majorVersion); return majorVersion; } /* * What is this JDBC driver's minor version number? * * @return the JDBC driver minor version */ public int getDriverMinorVersion() { int minorVersion = connection.this_driver.getMinorVersion(); if (Driver.logDebug) Driver.debug("getMinorVersion " + minorVersion); return minorVersion; } /* * Does the database store tables in a local file? No - it * stores them in a file on the server. * * @return true if so * @exception SQLException if a database access error occurs */ public boolean usesLocalFiles() throws SQLException { if (Driver.logDebug) Driver.debug("usesLocalFiles " + false); return false; } /* * Does the database use a file for each table? 
Well, not really, * since it doesnt use local files. * * @return true if so * @exception SQLException if a database access error occurs */ public boolean usesLocalFilePerTable() throws SQLException { if (Driver.logDebug) Driver.debug("usesLocalFilePerTable " + false); return false; } /* * Does the database treat mixed case unquoted SQL identifiers * as case sensitive and as a result store them in mixed case? * A JDBC-Compliant driver will always return false. * * <p>Predicament - what do they mean by "SQL identifiers" - if it * means the names of the tables and columns, then the answers * given below are correct - otherwise I don't know. * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsMixedCaseIdentifiers() throws SQLException { if (Driver.logDebug) Driver.debug("supportsMixedCaseIdentifiers " + false); return false; } /* * Does the database treat mixed case unquoted SQL identifiers as * case insensitive and store them in upper case? * * @return true if so */ public boolean storesUpperCaseIdentifiers() throws SQLException { if (Driver.logDebug) Driver.debug("storesUpperCaseIdentifiers " + false); return false; } /* * Does the database treat mixed case unquoted SQL identifiers as * case insensitive and store them in lower case? * * @return true if so */ public boolean storesLowerCaseIdentifiers() throws SQLException { if (Driver.logDebug) Driver.debug("storesLowerCaseIdentifiers " + true); return true; } /* * Does the database treat mixed case unquoted SQL identifiers as * case insensitive and store them in mixed case? * * @return true if so */ public boolean storesMixedCaseIdentifiers() throws SQLException { if (Driver.logDebug) Driver.debug("storesMixedCaseIdentifiers " + false); return false; } /* * Does the database treat mixed case quoted SQL identifiers as * case sensitive and as a result store them in mixed case? A * JDBC compliant driver will always return true. 
* * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsMixedCaseQuotedIdentifiers() throws SQLException { if (Driver.logDebug) Driver.debug("supportsMixedCaseQuotedIdentifiers " + true); return true; } /* * Does the database treat mixed case quoted SQL identifiers as * case insensitive and store them in upper case? * * @return true if so */ public boolean storesUpperCaseQuotedIdentifiers() throws SQLException { if (Driver.logDebug) Driver.debug("storesUpperCaseQuotedIdentifiers " + false); return false; } /* * Does the database treat mixed case quoted SQL identifiers as case * insensitive and store them in lower case? * * @return true if so */ public boolean storesLowerCaseQuotedIdentifiers() throws SQLException { if (Driver.logDebug) Driver.debug("storesLowerCaseQuotedIdentifiers " + false); return false; } /* * Does the database treat mixed case quoted SQL identifiers as case * insensitive and store them in mixed case? * * @return true if so */ public boolean storesMixedCaseQuotedIdentifiers() throws SQLException { if (Driver.logDebug) Driver.debug("storesMixedCaseQuotedIdentifiers " + false); return false; } /* * What is the string used to quote SQL identifiers? This returns * a space if identifier quoting isn't supported. A JDBC Compliant * driver will always use a double quote character. * * @return the quoting string * @exception SQLException if a database access error occurs */ public String getIdentifierQuoteString() throws SQLException { if (Driver.logDebug) Driver.debug("getIdentifierQuoteString \"" ); return "\""; } /* * Get a comma separated list of all a database's SQL keywords that * are NOT also SQL92 keywords. 
* * <p>Within PostgreSQL, the keywords are found in * src/backend/parser/keywords.c * * <p>For SQL Keywords, I took the list provided at * <a href="http://web.dementia.org/~shadow/sql/sql3bnf.sep93.txt"> * http://web.dementia.org/~shadow/sql/sql3bnf.sep93.txt</a> * which is for SQL3, not SQL-92, but it is close enough for * this purpose. * * @return a comma separated list of keywords we use * @exception SQLException if a database access error occurs */ public String getSQLKeywords() throws SQLException { if (Driver.logDebug) Driver.debug("getSQLKeyWords"); return "abort,acl,add,aggregate,append,archive,arch_store,backward,binary,change,cluster,copy,database,delimiter,delimiters,do,extend,explain,forward,heavy,index,inherits,isnull,light,listen,load,merge,nothing,notify,notnull,oids,purge,rename,replace,retrieve,returns,rule,recipe,setof,stdin,stdout,store,vacuum,verbose,version"; } public String getNumericFunctions() throws SQLException { // XXX-Not Implemented if (Driver.logDebug) Driver.debug("getNumericFunctions"); return ""; } public String getStringFunctions() throws SQLException { // XXX-Not Implemented if (Driver.logDebug) Driver.debug("getStringFunctions"); return ""; } public String getSystemFunctions() throws SQLException { // XXX-Not Implemented if (Driver.logDebug) Driver.debug("getSystemFunctions"); return ""; } public String getTimeDateFunctions() throws SQLException { // XXX-Not Implemented if (Driver.logDebug) Driver.debug("getTimeDateFunctions"); return ""; } /* * This is the string that can be used to escape '_' and '%' in * a search string pattern style catalog search parameters * * @return the string used to escape wildcard characters * @exception SQLException if a database access error occurs */ public String getSearchStringEscape() throws SQLException { if (Driver.logDebug) Driver.debug("getSearchStringEscape"); return "\\"; } /* * Get all the "extra" characters that can be used in unquoted * identifier names (those beyond a-zA-Z0-9 and _) * * 
<p>From the file src/backend/parser/scan.l, an identifier is * {letter}{letter_or_digit} which makes it just those listed * above. * * @return a string containing the extra characters * @exception SQLException if a database access error occurs */ public String getExtraNameCharacters() throws SQLException { if (Driver.logDebug) Driver.debug("getExtraNameCharacters"); return ""; } /* * Is "ALTER TABLE" with an add column supported? * Yes for PostgreSQL 6.1 * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsAlterTableWithAddColumn() throws SQLException { if (Driver.logDebug) Driver.debug("supportsAlterTableWithAddColumn " + true); return true; } /* * Is "ALTER TABLE" with a drop column supported? * Peter 10/10/2000 This was set to true, but 7.1devel doesn't support it! * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsAlterTableWithDropColumn() throws SQLException { if (Driver.logDebug) Driver.debug("supportsAlterTableWithDropColumn " + false); return false; } /* * Is column aliasing supported? * * <p>If so, the SQL AS clause can be used to provide names for * computed columns or to provide alias names for columns as * required. A JDBC Compliant driver always returns true. * * <p>e.g. * * <br><pre> * select count(C) as C_COUNT from T group by C; * * </pre><br> * should return a column named as C_COUNT instead of count(C) * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsColumnAliasing() throws SQLException { if (Driver.logDebug) Driver.debug("supportsColumnAliasing " + true); return true; } /* * Are concatenations between NULL and non-NULL values NULL? 
A * JDBC Compliant driver always returns true * * @return true if so * @exception SQLException if a database access error occurs */ public boolean nullPlusNonNullIsNull() throws SQLException { if (Driver.logDebug) Driver.debug("nullPlusNonNullIsNull " + true); return true; } public boolean supportsConvert() throws SQLException { // XXX-Not Implemented if (Driver.logDebug) Driver.debug("supportsConvert " + false); return false; } public boolean supportsConvert(int fromType, int toType) throws SQLException { // XXX-Not Implemented if (Driver.logDebug) Driver.debug("supportsConvert " + false); return false; } /* * Are table correlation names supported? A JDBC Compliant * driver always returns true. * * @return true if so; false otherwise * @exception SQLException - if a database access error occurs */ public boolean supportsTableCorrelationNames() throws SQLException { if (Driver.logDebug) Driver.debug("supportsTableCorrelationNames " + true); return true; } /* * If table correlation names are supported, are they restricted to * be different from the names of the tables? * * @return true if so; false otherwise * @exception SQLException - if a database access error occurs */ public boolean supportsDifferentTableCorrelationNames() throws SQLException { if (Driver.logDebug) Driver.debug("supportsDifferentTableCorrelationNames " + false); return false; } /* * Are expressions in "ORDER BY" lists supported? * * <br>e.g. select * from t order by a + b; * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsExpressionsInOrderBy() throws SQLException { if (Driver.logDebug) Driver.debug("supportsExpressionsInOrderBy " + true); return true; } /* * Can an "ORDER BY" clause use columns not in the SELECT? 
* * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsOrderByUnrelated() throws SQLException { boolean supportsOrderByUnrelated = connection.haveMinimumServerVersion("6.4"); if (Driver.logDebug) Driver.debug("supportsOrderByUnrelated " + supportsOrderByUnrelated); return supportsOrderByUnrelated; } /* * Is some form of "GROUP BY" clause supported? * I checked it, and yes it is. * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsGroupBy() throws SQLException { if (Driver.logDebug) Driver.debug("supportsGroupBy " + true); return true; } /* * Can a "GROUP BY" clause use columns not in the SELECT? * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsGroupByUnrelated() throws SQLException { boolean supportsGroupByUnrelated = connection.haveMinimumServerVersion("6.4"); if (Driver.logDebug) Driver.debug("supportsGroupByUnrelated " + supportsGroupByUnrelated); return supportsGroupByUnrelated; } /* * Can a "GROUP BY" clause add columns not in the SELECT provided * it specifies all the columns in the SELECT? Does anyone actually * understand what they mean here? * * (I think this is a subset of the previous function. -- petere) * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsGroupByBeyondSelect() throws SQLException { boolean supportsGroupByBeyondSelect = connection.haveMinimumServerVersion("6.4"); if (Driver.logDebug) Driver.debug("supportsGroupByUnrelated " + supportsGroupByBeyondSelect); return supportsGroupByBeyondSelect; } /* * Is the escape character in "LIKE" clauses supported? A * JDBC compliant driver always returns true. 
* * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsLikeEscapeClause() throws SQLException { boolean supportsLikeEscapeClause = connection.haveMinimumServerVersion("7.1"); if (Driver.logDebug) Driver.debug("supportsLikeEscapeClause " + supportsLikeEscapeClause); return supportsLikeEscapeClause; } /* * Are multiple ResultSets from a single execute supported? * Well, I implemented it, but I dont think this is possible from * the back ends point of view. * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsMultipleResultSets() throws SQLException { if (Driver.logDebug) Driver.debug("supportsMultipleResultSets " + false); return false; } /* * Can we have multiple transactions open at once (on different * connections?) * I guess we can have, since Im relying on it. * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsMultipleTransactions() throws SQLException { if (Driver.logDebug) Driver.debug("supportsMultipleTransactions " + true); return true; } /* * Can columns be defined as non-nullable. A JDBC Compliant driver * always returns true. * * <p>This changed from false to true in v6.2 of the driver, as this * support was added to the backend. * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsNonNullableColumns() throws SQLException { if (Driver.logDebug) Driver.debug("supportsNonNullableColumns true"); return true; } /* * Does this driver support the minimum ODBC SQL grammar. This * grammar is defined at: * * <p><a href="http://www.microsoft.com/msdn/sdk/platforms/doc/odbc/src/intropr.htm">http://www.microsoft.com/msdn/sdk/platforms/doc/odbc/src/intropr.htm</a> * * <p>In Appendix C. From this description, we seem to support the * ODBC minimal (Level 0) grammar. 
* * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsMinimumSQLGrammar() throws SQLException { if (Driver.logDebug) Driver.debug("supportsMinimumSQLGrammar TRUE"); return true; } /* * Does this driver support the Core ODBC SQL grammar. We need * SQL-92 conformance for this. * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsCoreSQLGrammar() throws SQLException { if (Driver.logDebug) Driver.debug("supportsCoreSQLGrammar FALSE "); return false; } /* * Does this driver support the Extended (Level 2) ODBC SQL * grammar. We don't conform to the Core (Level 1), so we can't * conform to the Extended SQL Grammar. * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsExtendedSQLGrammar() throws SQLException { if (Driver.logDebug) Driver.debug("supportsExtendedSQLGrammar FALSE"); return false; } /* * Does this driver support the ANSI-92 entry level SQL grammar? * All JDBC Compliant drivers must return true. We currently * report false until 'schema' support is added. Then this * should be changed to return true, since we will be mostly * compliant (probably more compliant than many other databases) * And since this is a requirement for all JDBC drivers we * need to get to the point where we can return true. * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsANSI92EntryLevelSQL() throws SQLException { boolean schemas = connection.haveMinimumServerVersion("7.3"); if (Driver.logDebug) Driver.debug("supportsANSI92EntryLevelSQL " + schemas); return schemas; } /* * Does this driver support the ANSI-92 intermediate level SQL * grammar? 
* * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsANSI92IntermediateSQL() throws SQLException { if (Driver.logDebug) Driver.debug("supportsANSI92IntermediateSQL false "); return false; } /* * Does this driver support the ANSI-92 full SQL grammar? * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsANSI92FullSQL() throws SQLException { if (Driver.logDebug) Driver.debug("supportsANSI92FullSQL false "); return false; } /* * Is the SQL Integrity Enhancement Facility supported? * I haven't seen this mentioned anywhere, so I guess not * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsIntegrityEnhancementFacility() throws SQLException { if (Driver.logDebug) Driver.debug("supportsIntegrityEnhancementFacility false "); return false; } /* * Is some form of outer join supported? * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsOuterJoins() throws SQLException { boolean supportsOuterJoins = connection.haveMinimumServerVersion("7.1"); if (Driver.logDebug) Driver.debug("supportsOuterJoins " + supportsOuterJoins); return supportsOuterJoins; } /* * Are full nexted outer joins supported? * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsFullOuterJoins() throws SQLException { boolean supportsFullOuterJoins = connection.haveMinimumServerVersion("7.1"); if (Driver.logDebug) Driver.debug("supportsFullOuterJoins " + supportsFullOuterJoins); return supportsFullOuterJoins; } /* * Is there limited support for outer joins? 
* * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsLimitedOuterJoins() throws SQLException { boolean supportsLimitedOuterJoins = connection.haveMinimumServerVersion("7.1"); if (Driver.logDebug) Driver.debug("supportsFullOuterJoins " + supportsLimitedOuterJoins); return supportsLimitedOuterJoins; } /* * What is the database vendor's preferred term for "schema"? * PostgreSQL doesn't have schemas, but when it does, we'll use the * term "schema". * * @return the vendor term * @exception SQLException if a database access error occurs */ public String getSchemaTerm() throws SQLException { if (Driver.logDebug) Driver.debug("getSchemaTerm schema"); return "schema"; } /* * What is the database vendor's preferred term for "procedure"? * Traditionally, "function" has been used. * * @return the vendor term * @exception SQLException if a database access error occurs */ public String getProcedureTerm() throws SQLException { if (Driver.logDebug) Driver.debug("getProcedureTerm function "); return "function"; } /* * What is the database vendor's preferred term for "catalog"? * * @return the vendor term * @exception SQLException if a database access error occurs */ public String getCatalogTerm() throws SQLException { if (Driver.logDebug) Driver.debug("getCatalogTerm database "); return "database"; } /* * Does a catalog appear at the start of a qualified table name? * (Otherwise it appears at the end). * * @return true if so * @exception SQLException if a database access error occurs */ public boolean isCatalogAtStart() throws SQLException { // return true here; we return false for every other catalog function // so it won't matter what we return here D.C. if (Driver.logDebug) Driver.debug("isCatalogAtStart not implemented"); return true; } /* * What is the Catalog separator. 
* * @return the catalog separator string * @exception SQLException if a database access error occurs */ public String getCatalogSeparator() throws SQLException { // Give them something to work with here // everything else returns false so it won't matter what we return here D.C. if (Driver.logDebug) Driver.debug("getCatalogSeparator not implemented "); return "."; } /* * Can a schema name be used in a data manipulation statement? Nope. * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsSchemasInDataManipulation() throws SQLException { if (Driver.logDebug) Driver.debug("supportsSchemasInDataManipulation false"); return false; } /* * Can a schema name be used in a procedure call statement? Nope. * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsSchemasInProcedureCalls() throws SQLException { if (Driver.logDebug) Driver.debug("supportsSchemasInProcedureCalls false"); return false; } /* * Can a schema be used in a table definition statement? Nope. * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsSchemasInTableDefinitions() throws SQLException { boolean schemas = connection.haveMinimumServerVersion("7.3"); if (Driver.logDebug) Driver.debug("supportsSchemasInTableDefinitions " + schemas); return schemas; } /* * Can a schema name be used in an index definition statement? * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsSchemasInIndexDefinitions() throws SQLException { if (Driver.logDebug) Driver.debug("supportsSchemasInIndexDefinitions false"); return false; } /* * Can a schema name be used in a privilege definition statement? 
* * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsSchemasInPrivilegeDefinitions() throws SQLException { if (Driver.logDebug) Driver.debug("supportsSchemasInPrivilegeDefinitions false"); return false; } /* * Can a catalog name be used in a data manipulation statement? * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsCatalogsInDataManipulation() throws SQLException { if (Driver.logDebug) Driver.debug("supportsCatalogsInDataManipulation false"); return false; } /* * Can a catalog name be used in a procedure call statement? * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsCatalogsInProcedureCalls() throws SQLException { if (Driver.logDebug) Driver.debug("supportsCatalogsInDataManipulation false"); return false; } /* * Can a catalog name be used in a table definition statement? * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsCatalogsInTableDefinitions() throws SQLException { if (Driver.logDebug) Driver.debug("supportsCatalogsInTableDefinitions false"); return false; } /* * Can a catalog name be used in an index definition? * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsCatalogsInIndexDefinitions() throws SQLException { if (Driver.logDebug) Driver.debug("supportsCatalogsInIndexDefinitions false"); return false; } /* * Can a catalog name be used in a privilege definition statement? * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsCatalogsInPrivilegeDefinitions() throws SQLException { if (Driver.logDebug) Driver.debug("supportsCatalogsInPrivilegeDefinitions false"); return false; } /* * We support cursors for gets only it seems. I dont see a method * to get a positioned delete. 
* * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsPositionedDelete() throws SQLException { if (Driver.logDebug) Driver.debug("supportsPositionedDelete false"); return false; // For now... } /* * Is positioned UPDATE supported? * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsPositionedUpdate() throws SQLException { if (Driver.logDebug) Driver.debug("supportsPositionedUpdate false"); return false; // For now... } /* * Is SELECT for UPDATE supported? * * @return true if so; false otherwise * @exception SQLException - if a database access error occurs */ public boolean supportsSelectForUpdate() throws SQLException { return connection.haveMinimumServerVersion("6.5"); } /* * Are stored procedure calls using the stored procedure escape * syntax supported? * * @return true if so; false otherwise * @exception SQLException - if a database access error occurs */ public boolean supportsStoredProcedures() throws SQLException { return false; } /* * Are subqueries in comparison expressions supported? A JDBC * Compliant driver always returns true. * * @return true if so; false otherwise * @exception SQLException - if a database access error occurs */ public boolean supportsSubqueriesInComparisons() throws SQLException { return true; } /* * Are subqueries in 'exists' expressions supported? A JDBC * Compliant driver always returns true. * * @return true if so; false otherwise * @exception SQLException - if a database access error occurs */ public boolean supportsSubqueriesInExists() throws SQLException { return true; } /* * Are subqueries in 'in' statements supported? A JDBC * Compliant driver always returns true. * * @return true if so; false otherwise * @exception SQLException - if a database access error occurs */ public boolean supportsSubqueriesInIns() throws SQLException { return true; } /* * Are subqueries in quantified expressions supported? 
A JDBC * Compliant driver always returns true. * * (No idea what this is, but we support a good deal of * subquerying.) * * @return true if so; false otherwise * @exception SQLException - if a database access error occurs */ public boolean supportsSubqueriesInQuantifieds() throws SQLException { return true; } /* * Are correlated subqueries supported? A JDBC Compliant driver * always returns true. * * (a.k.a. subselect in from?) * * @return true if so; false otherwise * @exception SQLException - if a database access error occurs */ public boolean supportsCorrelatedSubqueries() throws SQLException { return connection.haveMinimumServerVersion("7.1"); } /* * Is SQL UNION supported? * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsUnion() throws SQLException { return true; // since 6.3 } /* * Is SQL UNION ALL supported? * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsUnionAll() throws SQLException { return connection.haveMinimumServerVersion("7.1"); } /* * In PostgreSQL, Cursors are only open within transactions. * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsOpenCursorsAcrossCommit() throws SQLException { return false; } /* * Do we support open cursors across multiple transactions? * * @return true if so * @exception SQLException if a database access error occurs */ public boolean supportsOpenCursorsAcrossRollback() throws SQLException { return false; } /* * Can statements remain open across commits? They may, but * this driver cannot guarentee that. In further reflection. 
* we are talking a Statement object here, so the answer is * yes, since the Statement is only a vehicle to ExecSQL() * * @return true if they always remain open; false otherwise * @exception SQLException if a database access error occurs */ public boolean supportsOpenStatementsAcrossCommit() throws SQLException { return true; } /* * Can statements remain open across rollbacks? They may, but * this driver cannot guarentee that. In further contemplation, * we are talking a Statement object here, so the answer is yes, * since the Statement is only a vehicle to ExecSQL() in Connection * * @return true if they always remain open; false otherwise * @exception SQLException if a database access error occurs */ public boolean supportsOpenStatementsAcrossRollback() throws SQLException { return true; } /* * How many hex characters can you have in an inline binary literal * * @return the max literal length * @exception SQLException if a database access error occurs */ public int getMaxBinaryLiteralLength() throws SQLException { return 0; // no limit } /* * What is the maximum length for a character literal * I suppose it is 8190 (8192 - 2 for the quotes) * * @return the max literal length * @exception SQLException if a database access error occurs */ public int getMaxCharLiteralLength() throws SQLException { return 0; // no limit } /* * Whats the limit on column name length. The description of * pg_class would say '32' (length of pg_class.relname) - we * should probably do a query for this....but.... * * @return the maximum column name length * @exception SQLException if a database access error occurs */ public int getMaxColumnNameLength() throws SQLException { return 32; } /* * What is the maximum number of columns in a "GROUP BY" clause? 
* * @return the max number of columns * @exception SQLException if a database access error occurs */ public int getMaxColumnsInGroupBy() throws SQLException { return 0; // no limit } /* * What's the maximum number of columns allowed in an index? * 6.0 only allowed one column, but 6.1 introduced multi-column * indices, so, theoretically, its all of them. * * @return max number of columns * @exception SQLException if a database access error occurs */ public int getMaxColumnsInIndex() throws SQLException { return getMaxColumnsInTable(); } /* * What's the maximum number of columns in an "ORDER BY clause? * * @return the max columns * @exception SQLException if a database access error occurs */ public int getMaxColumnsInOrderBy() throws SQLException { return 0; // no limit } /* * What is the maximum number of columns in a "SELECT" list? * * @return the max columns * @exception SQLException if a database access error occurs */ public int getMaxColumnsInSelect() throws SQLException { return 0; // no limit } /* * What is the maximum number of columns in a table? From the * CREATE TABLE reference page... * * <p>"The new class is created as a heap with no initial data. A * class can have no more than 1600 attributes (realistically, * this is limited by the fact that tuple sizes must be less than * 8192 bytes)..." * * @return the max columns * @exception SQLException if a database access error occurs */ public int getMaxColumnsInTable() throws SQLException { return 1600; } /* * How many active connection can we have at a time to this * database? Well, since it depends on postmaster, which just * does a listen() followed by an accept() and fork(), its * basically very high. Unless the system runs out of processes, * it can be 65535 (the number of aux. ports on a TCP/IP system). 
* I will return 8192 since that is what even the largest system * can realistically handle, * * @return the maximum number of connections * @exception SQLException if a database access error occurs */ public int getMaxConnections() throws SQLException { return 8192; } /* * What is the maximum cursor name length (the same as all * the other F***** identifiers!) * * @return max cursor name length in bytes * @exception SQLException if a database access error occurs */ public int getMaxCursorNameLength() throws SQLException { return 32; } /* * Retrieves the maximum number of bytes for an index, including all * of the parts of the index. * * @return max index length in bytes, which includes the composite * of all the constituent parts of the index; a result of zero means * that there is no limit or the limit is not known * @exception SQLException if a database access error occurs */ public int getMaxIndexLength() throws SQLException { return 0; // no limit (larger than an int anyway) } public int getMaxSchemaNameLength() throws SQLException { // XXX-Not Implemented return 0; } /* * What is the maximum length of a procedure name? * (length of pg_proc.proname used) - again, I really * should do a query here to get it. * * @return the max name length in bytes * @exception SQLException if a database access error occurs */ public int getMaxProcedureNameLength() throws SQLException { return 32; } public int getMaxCatalogNameLength() throws SQLException { // XXX-Not Implemented return 0; } /* * What is the maximum length of a single row? * * @return max row size in bytes * @exception SQLException if a database access error occurs */ public int getMaxRowSize() throws SQLException { if (connection.haveMinimumServerVersion("7.1")) return 1073741824; // 1 GB else return 8192; // XXX could be altered } /* * Did getMaxRowSize() include LONGVARCHAR and LONGVARBINARY * blobs? 
We don't handle blobs yet * * @return true if so * @exception SQLException if a database access error occurs */ public boolean doesMaxRowSizeIncludeBlobs() throws SQLException { return false; } /* * What is the maximum length of a SQL statement? * * @return max length in bytes * @exception SQLException if a database access error occurs */ public int getMaxStatementLength() throws SQLException { if (connection.haveMinimumServerVersion("7.0")) return 0; // actually whatever fits in size_t else return 16384; } /* * How many active statements can we have open at one time to * this database? Basically, since each Statement downloads * the results as the query is executed, we can have many. However, * we can only really have one statement per connection going * at once (since they are executed serially) - so we return * one. * * @return the maximum * @exception SQLException if a database access error occurs */ public int getMaxStatements() throws SQLException { return 1; } /* * What is the maximum length of a table name? This was found * from pg_class.relname length * * @return max name length in bytes * @exception SQLException if a database access error occurs */ public int getMaxTableNameLength() throws SQLException { return 32; } /* * What is the maximum number of tables that can be specified * in a SELECT? * * @return the maximum * @exception SQLException if a database access error occurs */ public int getMaxTablesInSelect() throws SQLException { return 0; // no limit } /* * What is the maximum length of a user name? Well, we generally * use UNIX like user names in PostgreSQL, so I think this would * be 8. However, showing the schema for pg_user shows a length * for username of 32. * * @return the max name length in bytes * @exception SQLException if a database access error occurs */ public int getMaxUserNameLength() throws SQLException { return 32; } /* * What is the database's default transaction isolation level? 
     * The driver reports READ COMMITTED as the default level.
     * NOTE(review): an older comment here claimed all transactions are
     * SERIALIZABLE, which contradicts the value actually returned below.
     *
     * @return the default isolation level
     * @exception SQLException if a database access error occurs
     * @see Connection
     */
    public int getDefaultTransactionIsolation() throws SQLException
    {
        return Connection.TRANSACTION_READ_COMMITTED;
    }

    /*
     * Are transactions supported? If not, commit and rollback are noops
     * and the isolation level is TRANSACTION_NONE. We do support
     * transactions.
     *
     * @return true if transactions are supported
     * @exception SQLException if a database access error occurs
     */
    public boolean supportsTransactions() throws SQLException
    {
        return true;
    }

    /*
     * Does the database support the given transaction isolation level?
     * We only support TRANSACTION_SERIALIZABLE and TRANSACTION_READ_COMMITTED
     *
     * @param level the values are defined in java.sql.Connection
     * @return true if so
     * @exception SQLException if a database access error occurs
     * @see Connection
     */
    public boolean supportsTransactionIsolationLevel(int level) throws SQLException
    {
        if (level == Connection.TRANSACTION_SERIALIZABLE ||
                level == Connection.TRANSACTION_READ_COMMITTED)
            return true;
        else
            return false;
    }

    /*
     * Are both data definition and data manipulation transactions
     * supported?
     *
     * @return true if so
     * @exception SQLException if a database access error occurs
     */
    public boolean supportsDataDefinitionAndDataManipulationTransactions() throws SQLException
    {
        return true;
    }

    /*
     * Are only data manipulation statements within a transaction
     * supported?
     *
     * @return true if so
     * @exception SQLException if a database access error occurs
     */
    public boolean supportsDataManipulationTransactionsOnly() throws SQLException
    {
        return false;
    }

    /*
     * Does a data definition statement within a transaction force
     * the transaction to commit?
I think this means something like: * * <p><pre> * CREATE TABLE T (A INT); * INSERT INTO T (A) VALUES (2); * BEGIN; * UPDATE T SET A = A + 1; * CREATE TABLE X (A INT); * SELECT A FROM T INTO X; * COMMIT; * </pre><p> * * does the CREATE TABLE call cause a commit? The answer is no. * * @return true if so * @exception SQLException if a database access error occurs */ public boolean dataDefinitionCausesTransactionCommit() throws SQLException { return false; } /* * Is a data definition statement within a transaction ignored? * It seems to be (from experiment in previous method) * * @return true if so * @exception SQLException if a database access error occurs */ public boolean dataDefinitionIgnoredInTransactions() throws SQLException { return true; } /* * Get a description of stored procedures available in a catalog * * <p>Only procedure descriptions matching the schema and procedure * name criteria are returned. They are ordered by PROCEDURE_SCHEM * and PROCEDURE_NAME * * <p>Each procedure description has the following columns: * <ol> * <li><b>PROCEDURE_CAT</b> String => procedure catalog (may be null) * <li><b>PROCEDURE_SCHEM</b> String => procedure schema (may be null) * <li><b>PROCEDURE_NAME</b> String => procedure name * <li><b>Field 4</b> reserved (make it null) * <li><b>Field 5</b> reserved (make it null) * <li><b>Field 6</b> reserved (make it null) * <li><b>REMARKS</b> String => explanatory comment on the procedure * <li><b>PROCEDURE_TYPE</b> short => kind of procedure * <ul> * <li> procedureResultUnknown - May return a result * <li> procedureNoResult - Does not return a result * <li> procedureReturnsResult - Returns a result * </ul> * </ol> * * @param catalog - a catalog name; "" retrieves those without a * catalog; null means drop catalog name from criteria * @param schemaParrern - a schema name pattern; "" retrieves those * without a schema - we ignore this parameter * @param procedureNamePattern - a procedure name pattern * @return ResultSet - each row is a 
procedure description
	 * @exception SQLException if a database access error occurs
	 */
	public java.sql.ResultSet getProcedures(String catalog, String schemaPattern, String procedureNamePattern) throws SQLException
	{
		// the field descriptors for the new ResultSet
		Field f[] = new Field[8];
		java.sql.ResultSet r;	// ResultSet for the SQL query that we need to do
		Vector v = new Vector();		// The new ResultSet tuple stuff

		f[0] = new Field(connection, "PROCEDURE_CAT", iVarcharOid, 32);
		f[1] = new Field(connection, "PROCEDURE_SCHEM", iVarcharOid, 32);
		f[2] = new Field(connection, "PROCEDURE_NAME", iVarcharOid, 32);
		f[3] = f[4] = f[5] = new Field(connection, "reserved", iVarcharOid, 32);	// null;	// reserved, must be null for now
		f[6] = new Field(connection, "REMARKS", iVarcharOid, 8192);
		f[7] = new Field(connection, "PROCEDURE_TYPE", iInt2Oid, 2);

		// If the pattern is null, then set it to the default
		if (procedureNamePattern == null)
			procedureNamePattern = "%";

		// NOTE(review): the pattern is concatenated directly into the SQL;
		// a quote in the pattern breaks the query (no escaping here).
		r = connection.ExecSQL("select proname, proretset from pg_proc where proname like '" + procedureNamePattern.toLowerCase() + "' order by proname");

		while (r.next())
		{
			byte[][] tuple = new byte[8][0];

			tuple[0] = null;			// Catalog name
			tuple[1] = null;			// Schema name
			tuple[2] = r.getBytes(1);	// Procedure name
			tuple[3] = tuple[4] = tuple[5] = null;	// Reserved
			tuple[6] = null;			// Remarks (not fetched)

			// proretset tells us whether the procedure returns a result
			if (r.getBoolean(2))
				tuple[7] = Integer.toString(java.sql.DatabaseMetaData.procedureReturnsResult).getBytes();
			else
				tuple[7] = Integer.toString(java.sql.DatabaseMetaData.procedureNoResult).getBytes();

			v.addElement(tuple);
		}
		return new ResultSet(connection, f, v, "OK", 1);
	}

	/*
	 * Get a description of a catalog's stored procedure parameters
	 * and result columns.
	 *
	 * <p>Only descriptions matching the schema, procedure and parameter
	 * name criteria are returned. They are ordered by PROCEDURE_SCHEM
	 * and PROCEDURE_NAME. Within this, the return value, if any, is
	 * first. Next are the parameter descriptions in call order.
The
	 * column descriptions follow in column number order.
	 *
	 * <p>Each row in the ResultSet is a parameter description or column
	 * description with the following fields:
	 * <ol>
	 * <li><b>PROCEDURE_CAT</b> String => procedure catalog (may be null)
	 * <li><b>PROCEDURE_SCHEM</b> String => procedure schema (may be null)
	 * <li><b>PROCEDURE_NAME</b> String => procedure name
	 * <li><b>COLUMN_NAME</b> String => column/parameter name
	 * <li><b>COLUMN_TYPE</b> Short => kind of column/parameter:
	 * <ul><li>procedureColumnUnknown - nobody knows
	 * <li>procedureColumnIn - IN parameter
	 * <li>procedureColumnInOut - INOUT parameter
	 * <li>procedureColumnOut - OUT parameter
	 * <li>procedureColumnReturn - procedure return value
	 * <li>procedureColumnResult - result column in ResultSet
	 * </ul>
	 * <li><b>DATA_TYPE</b> short => SQL type from java.sql.Types
	 * <li><b>TYPE_NAME</b> String => SQL type name
	 * <li><b>PRECISION</b> int => precision
	 * <li><b>LENGTH</b> int => length in bytes of data
	 * <li><b>SCALE</b> short => scale
	 * <li><b>RADIX</b> short => radix
	 * <li><b>NULLABLE</b> short => can it contain NULL?
	 * <ul><li>procedureNoNulls - does not allow NULL values
	 * <li>procedureNullable - allows NULL values
	 * <li>procedureNullableUnknown - nullability unknown
	 * <li><b>REMARKS</b> String => comment describing parameter/column
	 * </ol>
	 * @param catalog This is ignored in org.postgresql, advise this is set to null
	 * @param schemaPattern This is ignored in org.postgresql, advise this is set to null
	 * @param procedureNamePattern a procedure name pattern
	 * @param columnNamePattern a column name pattern
	 * @return each row is a stored procedure parameter or column description
	 * @exception SQLException if a database-access error occurs
	 * @see #getSearchStringEscape
	 */
	// Implementation note: This is required for Borland's JBuilder to work
	public java.sql.ResultSet getProcedureColumns(String catalog, String schemaPattern, String procedureNamePattern, String columnNamePattern) throws SQLException
	{
		if (procedureNamePattern == null)
			procedureNamePattern = "%";

		if (columnNamePattern == null)
			columnNamePattern = "%";

		// for now, this returns an empty result set.
		// NOTE(review): the normalized patterns above are never used - this
		// is a stub that only describes the result columns.
		Field f[] = new Field[13];
		ResultSet r;	// ResultSet for the SQL query that we need to do (unused in the stub)
		Vector v = new Vector();		// The new ResultSet tuple stuff

		f[0] = new Field(connection, "PROCEDURE_CAT", iVarcharOid, 32);
		f[1] = new Field(connection, "PROCEDURE_SCHEM", iVarcharOid, 32);
		f[2] = new Field(connection, "PROCEDURE_NAME", iVarcharOid, 32);
		f[3] = new Field(connection, "COLUMN_NAME", iVarcharOid, 32);
		f[4] = new Field(connection, "COLUMN_TYPE", iInt2Oid, 2);
		f[5] = new Field(connection, "DATA_TYPE", iInt2Oid, 2);
		f[6] = new Field(connection, "TYPE_NAME", iVarcharOid, 32);
		f[7] = new Field(connection, "PRECISION", iInt4Oid, 4);
		f[8] = new Field(connection, "LENGTH", iInt4Oid, 4);
		f[9] = new Field(connection, "SCALE", iInt2Oid, 2);
		f[10] = new Field(connection, "RADIX", iInt2Oid, 2);
		f[11] = new Field(connection, "NULLABLE", iInt2Oid, 2);
		f[12] = new Field(connection, "REMARKS", iVarcharOid, 32);

		// add query loop here

		return new ResultSet(connection, f, v, "OK", 1);
	}

	/*
	 * Get a description of tables available in a catalog.
	 *
	 * <p>Only table descriptions matching the catalog, schema, table
	 * name and type criteria are returned. They are ordered by
	 * TABLE_TYPE, TABLE_SCHEM and TABLE_NAME.
	 *
	 * <p>Each table description has the following columns:
	 *
	 * <ol>
	 * <li><b>TABLE_CAT</b> String => table catalog (may be null)
	 * <li><b>TABLE_SCHEM</b> String => table schema (may be null)
	 * <li><b>TABLE_NAME</b> String => table name
	 * <li><b>TABLE_TYPE</b> String => table type. Typical types are "TABLE",
	 * "VIEW", "SYSTEM TABLE", "GLOBAL TEMPORARY", "LOCAL
	 * TEMPORARY", "ALIAS", "SYNONYM".
* <li><b>REMARKS</b> String => explanatory comment on the table
	 * </ol>
	 *
	 * <p>The valid values for the types parameter are:
	 * "TABLE", "INDEX", "SEQUENCE", "SYSTEM TABLE" and "SYSTEM INDEX"
	 *
	 * @param catalog a catalog name; For org.postgresql, this is ignored, and
	 * should be set to null
	 * @param schemaPattern a schema name pattern; For org.postgresql, this is ignored, and
	 * should be set to null
	 * @param tableNamePattern a table name pattern. For all tables this should be "%"
	 * @param types a list of table types to include; null returns
	 * all types
	 * @return each row is a table description
	 * @exception SQLException if a database-access error occurs.
	 */
	public java.sql.ResultSet getTables(String catalog, String schemaPattern, String tableNamePattern, String types[]) throws SQLException
	{
		// Handle default value for types
		if (types == null)
			types = defaultTableTypes;

		if (tableNamePattern == null)
			tableNamePattern = "%";

		// the field descriptors for the new ResultSet
		Field f[] = new Field[5];
		java.sql.ResultSet r;	// ResultSet for the SQL query that we need to do
		Vector v = new Vector();		// The new ResultSet tuple stuff

		f[0] = new Field(connection, "TABLE_CAT", iVarcharOid, 32);
		f[1] = new Field(connection, "TABLE_SCHEM", iVarcharOid, 32);
		f[2] = new Field(connection, "TABLE_NAME", iVarcharOid, 32);
		f[3] = new Field(connection, "TABLE_TYPE", iVarcharOid, 32);
		f[4] = new Field(connection, "REMARKS", iVarcharOid, 32);

		// Now form the query: OR together the where-clause of every
		// requested type from the getTableTypes table.
		StringBuffer sql = new StringBuffer("select relname,oid,relkind from pg_class where (");
		boolean notFirst = false;
		for (int i = 0;i < types.length;i++)
		{
			for (int j = 0;j < getTableTypes.length;j++)
				if (getTableTypes[j][0].equals(types[i]))
				{
					if (notFirst)
						sql.append(" or ");
					sql.append(getTableTypes[j][1]);
					notFirst = true;
				}
		}

		// Added by Stefan Andreasen <[email protected]>
		// Now take the pattern into account
		sql.append(") and relname like '");
		sql.append(tableNamePattern.toLowerCase());
		sql.append("' order by relkind, relname");

		// Now run the query
		r = connection.ExecSQL(sql.toString());

		while (r.next())
		{
			byte[][] tuple = new byte[5][0];

			// Fetch the description for the table (if any)
			// 7.2+ has the obj_description() helper; older servers need a
			// direct pg_description lookup.
			String getDescriptionStatement =
				connection.haveMinimumServerVersion("7.2") ?
				"select obj_description(" + r.getInt(2) + ",'pg_class')" :
				"select description from pg_description where objoid=" + r.getInt(2);

			java.sql.ResultSet dr = connection.ExecSQL(getDescriptionStatement);

			byte remarks[] = null;

			if (((org.postgresql.ResultSet)dr).getTupleCount() == 1)
			{
				dr.next();
				remarks = dr.getBytes(1);
			}
			dr.close();

			// Map the single-character relkind onto a JDBC table type;
			// 'r' relations named pg_* are reported as system tables.
			String relKind;
			switch (r.getBytes(3)[0])
			{
				case (byte) 'r':
					if ( r.getString(1).startsWith("pg_") )
					{
						relKind = "SYSTEM TABLE";
					}
					else
					{
						relKind = "TABLE";
					}
					break;
				case (byte) 'i':
					relKind = "INDEX";
					break;
				case (byte) 'S':
					relKind = "SEQUENCE";
					break;
				case (byte) 'v':
					relKind = "VIEW";
					break;
				default:
					relKind = null;
			}

			tuple[0] = null;		// Catalog name
			tuple[1] = null;		// Schema name
			tuple[2] = r.getBytes(1);	// Table name
			tuple[3] = (relKind == null) ? null : relKind.getBytes();	// Table type
			tuple[4] = remarks;		// Remarks
			v.addElement(tuple);
		}
		r.close();
		return new ResultSet(connection, f, v, "OK", 1);
	}

	// This array contains the valid values for the types argument
	// in getTables().
	//
	// Each supported type consists of its name, and the sql where
	// clause to retrieve that value.
// // IMPORTANT: the query must be enclosed in ( ) private static final String getTableTypes[][] = { {"TABLE", "(relkind='r' and relhasrules='f' and relname !~ '^pg_' and relname !~ '^xinv')"}, {"VIEW", "(relkind='v' and relname !~ '^pg_')"}, {"INDEX", "(relkind='i' and relname !~ '^pg_')"}, {"SEQUENCE", "(relkind='S' and relname !~ '^pg_')"}, {"SYSTEM TABLE", "(relkind='r' and relname ~ '^pg_')"}, {"SYSTEM INDEX", "(relkind='i' and relname ~ '^pg_')"} }; // These are the default tables, used when NULL is passed to getTables // The choice of these provide the same behaviour as psql's \d private static final String defaultTableTypes[] = { "TABLE", "VIEW", "INDEX", "SEQUENCE" }; /* * Get the schema names available in this database. The results * are ordered by schema name. * * <P>The schema column is: * <OL> * <LI><B>TABLE_SCHEM</B> String => schema name * </OL> * * @return ResultSet each row has a single String column that is a * schema name */ public java.sql.ResultSet getSchemas() throws SQLException { // We don't use schemas, so we simply return a single schema name "". // Field f[] = new Field[1]; Vector v = new Vector(); byte[][] tuple = new byte[1][0]; f[0] = new Field(connection, "TABLE_SCHEM", iVarcharOid, 32); tuple[0] = "".getBytes(); v.addElement(tuple); return new ResultSet(connection, f, v, "OK", 1); } /* * Get the catalog names available in this database. The results * are ordered by catalog name. * * <P>The catalog column is: * <OL> * <LI><B>TABLE_CAT</B> String => catalog name * </OL> * * @return ResultSet each row has a single String column that is a * catalog name */ public java.sql.ResultSet getCatalogs() throws SQLException { return connection.createStatement().executeQuery("select datname as TABLE_CAT from pg_database;"); } /* * Get the table types available in this database. The results * are ordered by table type. * * <P>The table type is: * <OL> * <LI><B>TABLE_TYPE</B> String => table type. 
Typical types are "TABLE",
	 * "VIEW", "SYSTEM TABLE", "GLOBAL TEMPORARY",
	 * "LOCAL TEMPORARY", "ALIAS", "SYNONYM".
	 * </OL>
	 *
	 * @return ResultSet each row has a single String column that is a
	 * table type
	 */
	public java.sql.ResultSet getTableTypes() throws SQLException
	{
		// One column only: TABLE_TYPE.
		Field f[] = new Field[1];
		Vector v = new Vector();
		f[0] = new Field(connection, "TABLE_TYPE", iVarcharOid, 32);
		for (int i = 0;i < getTableTypes.length;i++)
		{
			// FIX: the tuple must have exactly one column to match the single
			// field descriptor above; the original allocated new byte[2][0],
			// producing rows wider than the declared result set.
			byte[][] tuple = new byte[1][0];
			tuple[0] = getTableTypes[i][0].getBytes();
			v.addElement(tuple);
		}
		return new ResultSet(connection, f, v, "OK", 1);
	}

	/*
	 * Get a description of table columns available in a catalog.
	 *
	 * <P>Only column descriptions matching the catalog, schema, table
	 * and column name criteria are returned. They are ordered by
	 * TABLE_SCHEM, TABLE_NAME and ORDINAL_POSITION.
	 *
	 * <P>Each column description has the following columns:
	 * <OL>
	 * <LI><B>TABLE_CAT</B> String => table catalog (may be null)
	 * <LI><B>TABLE_SCHEM</B> String => table schema (may be null)
	 * <LI><B>TABLE_NAME</B> String => table name
	 * <LI><B>COLUMN_NAME</B> String => column name
	 * <LI><B>DATA_TYPE</B> short => SQL type from java.sql.Types
	 * <LI><B>TYPE_NAME</B> String => Data source dependent type name
	 * <LI><B>COLUMN_SIZE</B> int => column size. For char or date
	 * types this is the maximum number of characters, for numeric or
	 * decimal types this is precision.
	 * <LI><B>BUFFER_LENGTH</B> is not used.
	 * <LI><B>DECIMAL_DIGITS</B> int => the number of fractional digits
	 * <LI><B>NUM_PREC_RADIX</B> int => Radix (typically either 10 or 2)
	 * <LI><B>NULLABLE</B> int => is NULL allowed?
* <UL>
	 * <LI> columnNoNulls - might not allow NULL values
	 * <LI> columnNullable - definitely allows NULL values
	 * <LI> columnNullableUnknown - nullability unknown
	 * </UL>
	 * <LI><B>REMARKS</B> String => comment describing column (may be null)
	 * <LI><B>COLUMN_DEF</B> String => default value (may be null)
	 * <LI><B>SQL_DATA_TYPE</B> int => unused
	 * <LI><B>SQL_DATETIME_SUB</B> int => unused
	 * <LI><B>CHAR_OCTET_LENGTH</B> int => for char types the
	 * maximum number of bytes in the column
	 * <LI><B>ORDINAL_POSITION</B> int => index of column in table
	 * (starting at 1)
	 * <LI><B>IS_NULLABLE</B> String => "NO" means column definitely
	 * does not allow NULL values; "YES" means the column might
	 * allow NULL values. An empty string means nobody knows.
	 * </OL>
	 *
	 * @param catalog a catalog name; "" retrieves those without a catalog
	 * @param schemaPattern a schema name pattern; "" retrieves those
	 * without a schema
	 * @param tableNamePattern a table name pattern
	 * @param columnNamePattern a column name pattern
	 * @return ResultSet each row is a column description
	 * @see #getSearchStringEscape
	 */
	public java.sql.ResultSet getColumns(String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) throws SQLException
	{
		Vector v = new Vector();		// The new ResultSet tuple stuff
		Field f[] = new Field[18];		// The field descriptors for the new ResultSet

		f[0] = new Field(connection, "TABLE_CAT", iVarcharOid, 32);
		f[1] = new Field(connection, "TABLE_SCHEM", iVarcharOid, 32);
		f[2] = new Field(connection, "TABLE_NAME", iVarcharOid, 32);
		f[3] = new Field(connection, "COLUMN_NAME", iVarcharOid, 32);
		f[4] = new Field(connection, "DATA_TYPE", iInt2Oid, 2);
		f[5] = new Field(connection, "TYPE_NAME", iVarcharOid, 32);
		f[6] = new Field(connection, "COLUMN_SIZE", iInt4Oid, 4);
		f[7] = new Field(connection, "BUFFER_LENGTH", iVarcharOid, 32);
		f[8] = new Field(connection, "DECIMAL_DIGITS", iInt4Oid, 4);
		f[9] = new Field(connection, "NUM_PREC_RADIX", iInt4Oid, 4);
		f[10] = new Field(connection, "NULLABLE", iInt4Oid, 4);
		f[11] = new Field(connection, "REMARKS", iVarcharOid, 32);
		f[12] = new Field(connection, "COLUMN_DEF", iVarcharOid, 32);
		f[13] = new Field(connection, "SQL_DATA_TYPE", iInt4Oid, 4);
		f[14] = new Field(connection, "SQL_DATETIME_SUB", iInt4Oid, 4);
		f[15] = new Field(connection, "CHAR_OCTET_LENGTH", iVarcharOid, 32);
		f[16] = new Field(connection, "ORDINAL_POSITION", iInt4Oid, 4);
		f[17] = new Field(connection, "IS_NULLABLE", iVarcharOid, 32);

		StringBuffer sql = new StringBuffer(512);

		/* Build a >= 7.1 SQL statement to list all columns */
		sql.append("select " +
				   (connection.haveMinimumServerVersion("7.2") ? "a.attrelid, " : "a.oid, ") +
				   " c.relname, " +
				   " a.attname, " +
				   " a.atttypid, " +
				   " a.attnum, " +
				   " a.attnotnull, " +
				   " a.attlen, " +
				   " a.atttypmod, " +
				   " d.adsrc, " +
				   " t.typname, " +
				   /* Use the new col_description in 7.2 or an additional outer join in 7.1 */
				   (connection.haveMinimumServerVersion("7.2") ? "col_description(a.attrelid, a.attnum) " : "e.description ") +
				   "from" +
				   " (" +
				   " (pg_class c inner join pg_attribute a on" +
				   " (" +
				   " a.attrelid=c.oid");

		if ((tableNamePattern != null) && ! tableNamePattern.equals("%"))
		{
			sql.append(" and c.relname like \'" + tableNamePattern.toLowerCase() + "\'");
		}

		if ((columnNamePattern != null) && ! columnNamePattern.equals("%"))
		{
			sql.append(" and a.attname like \'" + columnNamePattern + "\'");
		}

		sql.append(
			" and a.attnum > 0" +
			" )" +
			" ) inner join pg_type t on" +
			" (" +
			" t.oid = a.atttypid" +
			" )" +
			" )" +
			" left outer join pg_attrdef d on" +
			" (" +
			" c.oid = d.adrelid" +
			" and a.attnum = d.adnum" +
			" ) ");

		if (!connection.haveMinimumServerVersion("7.2"))
		{
			/* Only for 7.1 */
			sql.append(
				" left outer join pg_description e on" +
				" (" +
				" e.objoid = a.oid" +
				" ) ");
		}

		sql.append("order by" +
				   " c.relname, a.attnum");

		java.sql.ResultSet r = connection.ExecSQL(sql.toString());
		while (r.next())
		{
			byte[][] tuple = new byte[18][0];

			String nullFlag = r.getString(6);
			String typname = r.getString(10);

			tuple[0] = null;						// Catalog name, not supported
			tuple[1] = null;						// Schema name, not supported
			tuple[2] = r.getBytes(2);				// Table name
			tuple[3] = r.getBytes(3);				// Column name

			tuple[4] = Integer.toString(connection.getSQLType(typname)).getBytes();		// Data type
			tuple[5] = typname.getBytes();			// Type name

			// Column size
			// Looking at the psql source,
			// I think the length of a varchar as specified when the table was created
			// should be extracted from atttypmod which contains this length + sizeof(int32)
			if (typname.equals("bpchar") || typname.equals("varchar"))
			{
				int atttypmod = r.getInt(8);
				tuple[6] = Integer.toString(atttypmod != -1 ? atttypmod - VARHDRSZ : 0).getBytes();
			}
			else
			{
				tuple[6] = r.getBytes(7);
			}

			tuple[7] = null;						// Buffer length

			// Decimal digits = scale
			// From the source (see e.g. backend/utils/adt/format_type.c,
			// function numeric()) the scale and precision can be calculated
			// from the typmod value.
			if (typname.equals("numeric") || typname.equals("decimal"))
			{
				int attypmod = r.getInt(8) - VARHDRSZ;
				tuple[8] = Integer.toString( attypmod & 0xffff ).getBytes();
				tuple[9] = Integer.toString( ( attypmod >> 16 ) & 0xffff ).getBytes();
			}
			else
			{
				tuple[8] = "0".getBytes();
				tuple[9] = "10".getBytes();		// Num Prec Radix - assume decimal
			}

			tuple[10] = Integer.toString(nullFlag.equals("f") ?
										 java.sql.DatabaseMetaData.columnNullable :
										 java.sql.DatabaseMetaData.columnNoNulls).getBytes();	// Nullable
			tuple[11] = r.getBytes(11);				// Description (if any)
			tuple[12] = r.getBytes(9);				// Column default
			tuple[13] = null;						// sql data type (unused)
			tuple[14] = null;						// sql datetime sub (unused)
			tuple[15] = tuple[6];					// char octet length
			tuple[16] = r.getBytes(5);				// ordinal position
			tuple[17] = (nullFlag.equals("f") ? "YES" : "NO").getBytes();	// Is nullable

			v.addElement(tuple);
		}
		r.close();
		return new ResultSet(connection, f, v, "OK", 1);
	}

	/*
	 * Get a description of the access rights for a table's columns.
	 *
	 * <P>Only privileges matching the column name criteria are
	 * returned. They are ordered by COLUMN_NAME and PRIVILEGE.
	 *
	 * <P>Each privilige description has the following columns:
	 * <OL>
	 * <LI><B>TABLE_CAT</B> String => table catalog (may be null)
	 * <LI><B>TABLE_SCHEM</B> String => table schema (may be null)
	 * <LI><B>TABLE_NAME</B> String => table name
	 * <LI><B>COLUMN_NAME</B> String => column name
	 * <LI><B>GRANTOR</B> => grantor of access (may be null)
	 * <LI><B>GRANTEE</B> String => grantee of access
	 * <LI><B>PRIVILEGE</B> String => name of access (SELECT,
	 * INSERT, UPDATE, REFERENCES, ...)
* <LI><B>IS_GRANTABLE</B> String => "YES" if grantee is permitted
	 * to grant to others; "NO" if not; null if unknown
	 * </OL>
	 *
	 * @param catalog a catalog name; "" retrieves those without a catalog
	 * @param schema a schema name; "" retrieves those without a schema
	 * @param table a table name
	 * @param columnNamePattern a column name pattern
	 * @return ResultSet each row is a column privilege description
	 * @see #getSearchStringEscape
	 */
	public java.sql.ResultSet getColumnPrivileges(String catalog, String schema, String table, String columnNamePattern) throws SQLException
	{
		Field f[] = new Field[8];
		Vector v = new Vector();

		if (table == null)
			table = "%";

		// NOTE(review): the lower-cased columnNamePattern is never used below;
		// the query filters on table name only and no rows are ever added.
		if (columnNamePattern == null)
			columnNamePattern = "%";
		else
			columnNamePattern = columnNamePattern.toLowerCase();

		f[0] = new Field(connection, "TABLE_CAT", iVarcharOid, 32);
		f[1] = new Field(connection, "TABLE_SCHEM", iVarcharOid, 32);
		f[2] = new Field(connection, "TABLE_NAME", iVarcharOid, 32);
		f[3] = new Field(connection, "COLUMN_NAME", iVarcharOid, 32);
		f[4] = new Field(connection, "GRANTOR", iVarcharOid, 32);
		f[5] = new Field(connection, "GRANTEE", iVarcharOid, 32);
		f[6] = new Field(connection, "PRIVILEGE", iVarcharOid, 32);
		f[7] = new Field(connection, "IS_GRANTABLE", iVarcharOid, 32);

		// This is taken direct from the psql source
		java.sql.ResultSet r = connection.ExecSQL("SELECT relname, relacl FROM pg_class, pg_user WHERE ( relkind = 'r' OR relkind = 'i') and relname !~ '^pg_' and relname !~ '^xin[vx][0-9]+' and usesysid = relowner and relname like '" + table.toLowerCase() + "' ORDER BY relname");
		while (r.next())
		{
			byte[][] tuple = new byte[8][0];
			tuple[0] = tuple[1] = "".getBytes();
			if (Driver.logDebug)
				Driver.debug("relname=\"" + r.getString(1) + "\" relacl=\"" + r.getString(2) + "\"");

			// For now, don't add to the result as relacl needs to be processed.
			//v.addElement(tuple);
		}

		return new ResultSet(connection, f, v, "OK", 1);
	}

	/*
	 * Get a description of the access rights for each table available
	 * in a catalog.
	 *
	 * This method is currently unimplemented.
	 *
	 * <P>Only privileges matching the schema and table name
	 * criteria are returned. They are ordered by TABLE_SCHEM,
	 * TABLE_NAME, and PRIVILEGE.
	 *
	 * <P>Each privilige description has the following columns:
	 * <OL>
	 * <LI><B>TABLE_CAT</B> String => table catalog (may be null)
	 * <LI><B>TABLE_SCHEM</B> String => table schema (may be null)
	 * <LI><B>TABLE_NAME</B> String => table name
	 * <LI><B>COLUMN_NAME</B> String => column name
	 * <LI><B>GRANTOR</B> => grantor of access (may be null)
	 * <LI><B>GRANTEE</B> String => grantee of access
	 * <LI><B>PRIVILEGE</B> String => name of access (SELECT,
	 * INSERT, UPDATE, REFRENCES, ...)
	 * <LI><B>IS_GRANTABLE</B> String => "YES" if grantee is permitted
	 * to grant to others; "NO" if not; null if unknown
	 * </OL>
	 *
	 * @param catalog a catalog name; "" retrieves those without a catalog
	 * @param schemaPattern a schema name pattern; "" retrieves those
	 * without a schema
	 * @param tableNamePattern a table name pattern
	 * @return ResultSet each row is a table privilege description
	 * @see #getSearchStringEscape
	 */
	public java.sql.ResultSet getTablePrivileges(String catalog, String schemaPattern, String tableNamePattern) throws SQLException
	{
		Field f[] = new Field[8];
		Vector v = new Vector();

		if (tableNamePattern == null)
			tableNamePattern = "%";

		f[0] = new Field(connection, "TABLE_CAT", iVarcharOid, 32);
		f[1] = new Field(connection, "TABLE_SCHEM", iVarcharOid, 32);
		f[2] = new Field(connection, "TABLE_NAME", iVarcharOid, 32);
		f[3] = new Field(connection, "COLUMN_NAME", iVarcharOid, 32);
		f[4] = new Field(connection, "GRANTOR", iVarcharOid, 32);
		f[5] = new Field(connection, "GRANTEE", iVarcharOid, 32);
		f[6] = new Field(connection, "PRIVILEGE", iVarcharOid, 32);
		f[7] = new Field(connection, "IS_GRANTABLE", iVarcharOid, 32);

		// This is taken direct from the psql source
		java.sql.ResultSet r = connection.ExecSQL("SELECT relname, relacl FROM pg_class, pg_user WHERE ( relkind = 'r' OR relkind = 'i') and relname !~ '^pg_' and relname !~ '^xin[vx][0-9]+' and usesysid = relowner and relname like '" + tableNamePattern.toLowerCase() + "' ORDER BY relname");
		while (r.next())
		{
			byte[][] tuple = new byte[8][0];
			tuple[0] = tuple[1] = "".getBytes();
			if (Driver.logDebug)
				Driver.debug("relname=\"" + r.getString(1) + "\" relacl=\"" + r.getString(2) + "\"");

			// For now, don't add to the result as relacl needs to be processed.
			//v.addElement(tuple);
		}

		return new ResultSet(connection, f, v, "OK", 1);
	}

	/*
	 * Get a description of a table's optimal set of columns that
	 * uniquely identifies a row. They are ordered by SCOPE.
	 *
	 * This method is currently not implemented.
	 *
	 * <P>Each column description has the following columns:
	 * <OL>
	 * <LI><B>SCOPE</B> short => actual scope of result
	 * <UL>
	 * <LI> bestRowTemporary - very temporary, while using row
	 * <LI> bestRowTransaction - valid for remainder of current transaction
	 * <LI> bestRowSession - valid for remainder of current session
	 * </UL>
	 * <LI><B>COLUMN_NAME</B> String => column name
	 * <LI><B>DATA_TYPE</B> short => SQL data type from java.sql.Types
	 * <LI><B>TYPE_NAME</B> String => Data source dependent type name
	 * <LI><B>COLUMN_SIZE</B> int => precision
	 * <LI><B>BUFFER_LENGTH</B> int => not used
	 * <LI><B>DECIMAL_DIGITS</B> short => scale
	 * <LI><B>PSEUDO_COLUMN</B> short => is this a pseudo column
	 * like an Oracle ROWID
	 * <UL>
	 * <LI> bestRowUnknown - may or may not be pseudo column
	 * <LI> bestRowNotPseudo - is NOT a pseudo column
	 * <LI> bestRowPseudo - is a pseudo column
	 * </UL>
	 * </OL>
	 *
	 * @param catalog a catalog name; "" retrieves those without a catalog
	 * @param schema a schema name; "" retrieves those without a schema
	 * @param table a table name
	 * @param scope the scope of interest; use same values as SCOPE
	 * @param nullable include columns that are nullable?
* @return ResultSet each row is a column description
	 */
	// Implementation note: This is required for Borland's JBuilder to work
	public java.sql.ResultSet getBestRowIdentifier(String catalog, String schema, String table, int scope, boolean nullable) throws SQLException
	{
		// for now, this returns an empty result set.
		Field f[] = new Field[8];
		ResultSet r;	// ResultSet for the SQL query that we need to do (unused in the stub)
		Vector v = new Vector();		// The new ResultSet tuple stuff

		f[0] = new Field(connection, "SCOPE", iInt2Oid, 2);
		f[1] = new Field(connection, "COLUMN_NAME", iVarcharOid, 32);
		f[2] = new Field(connection, "DATA_TYPE", iInt2Oid, 2);
		f[3] = new Field(connection, "TYPE_NAME", iVarcharOid, 32);
		f[4] = new Field(connection, "COLUMN_SIZE", iInt4Oid, 4);
		f[5] = new Field(connection, "BUFFER_LENGTH", iInt4Oid, 4);
		f[6] = new Field(connection, "DECIMAL_DIGITS", iInt2Oid, 2);
		f[7] = new Field(connection, "PSEUDO_COLUMN", iInt2Oid, 2);

		return new ResultSet(connection, f, v, "OK", 1);
	}

	/*
	 * Get a description of a table's columns that are automatically
	 * updated when any value in a row is updated. They are
	 * unordered.
	 *
	 * This method is currently unimplemented.
	 *
	 * <P>Each column description has the following columns:
	 * <OL>
	 * <LI><B>SCOPE</B> short => is not used
	 * <LI><B>COLUMN_NAME</B> String => column name
	 * <LI><B>DATA_TYPE</B> short => SQL data type from java.sql.Types
	 * <LI><B>TYPE_NAME</B> String => Data source dependent type name
	 * <LI><B>COLUMN_SIZE</B> int => precision
	 * <LI><B>BUFFER_LENGTH</B> int => length of column value in bytes
	 * <LI><B>DECIMAL_DIGITS</B> short => scale
	 * <LI><B>PSEUDO_COLUMN</B> short => is this a pseudo column
	 * like an Oracle ROWID
	 * <UL>
	 * <LI> versionColumnUnknown - may or may not be pseudo column
	 * <LI> versionColumnNotPseudo - is NOT a pseudo column
	 * <LI> versionColumnPseudo - is a pseudo column
	 * </UL>
	 * </OL>
	 *
	 * @param catalog a catalog name; "" retrieves those without a catalog
	 * @param schema a schema name; "" retrieves those without a schema
	 * @param table a table name
	 * @return ResultSet each row is a column description
	 */
	public java.sql.ResultSet getVersionColumns(String catalog, String schema, String table) throws SQLException
	{
		throw org.postgresql.Driver.notImplemented();
	}

	/*
	 * Get a description of a table's primary key columns. They
	 * are ordered by COLUMN_NAME.
*
	 * <P>Each column description has the following columns:
	 * <OL>
	 * <LI><B>TABLE_CAT</B> String => table catalog (may be null)
	 * <LI><B>TABLE_SCHEM</B> String => table schema (may be null)
	 * <LI><B>TABLE_NAME</B> String => table name
	 * <LI><B>COLUMN_NAME</B> String => column name
	 * <LI><B>KEY_SEQ</B> short => sequence number within primary key
	 * <LI><B>PK_NAME</B> String => primary key name (may be null)
	 * </OL>
	 *
	 * @param catalog a catalog name; "" retrieves those without a catalog
	 * @param schema a schema name pattern; "" retrieves those
	 * without a schema
	 * @param table a table name
	 * @return ResultSet each row is a primary key column description
	 */
	public java.sql.ResultSet getPrimaryKeys(String catalog, String schema, String table) throws SQLException
	{
		// NOTE(review): the table name is concatenated directly into the SQL;
		// a quote in the name breaks the query (no escaping here).
		return connection.createStatement().executeQuery("SELECT " +
				"'' as TABLE_CAT," +
				"'' AS TABLE_SCHEM," +
				"bc.relname AS TABLE_NAME," +
				"a.attname AS COLUMN_NAME," +
				"a.attnum as KEY_SEQ," +
				"ic.relname as PK_NAME " +
				" FROM pg_class bc, pg_class ic, pg_index i, pg_attribute a" +
				" WHERE bc.relkind = 'r' " + //	-- not indices
				" and upper(bc.relname) = upper('" + table + "')" +
				" and i.indrelid = bc.oid" +
				" and i.indexrelid = ic.oid" +
				" and ic.oid = a.attrelid" +
				" and i.indisprimary='t' " +
				" ORDER BY table_name, pk_name, key_seq"
										  );
	}

	/*
	 SELECT
	 c.relname as primary,
	 c2.relname as foreign,
	 t.tgconstrname,
	 ic.relname as fkeyname,
	 af.attnum as fkeyseq,
	 ipc.relname as pkeyname,
	 ap.attnum as pkeyseq,
	 t.tgdeferrable,
	 t.tginitdeferred,
	 t.tgnargs,t.tgargs,
	 p1.proname as updaterule,
	 p2.proname as deleterule
	 FROM pg_trigger t, pg_trigger t1, pg_class c, pg_class c2,
	 pg_class ic, pg_class ipc, pg_proc p1, pg_proc p2,
	 pg_index if, pg_index ip, pg_attribute af, pg_attribute ap
	 WHERE (t.tgrelid=c.oid AND t.tgisconstraint AND t.tgconstrrelid=c2.oid
	 AND t.tgfoid=p1.oid and p1.proname like '%%upd')
	 and (t1.tgrelid=c.oid and t1.tgisconstraint and t1.tgconstrrelid=c2.oid
	 AND t1.tgfoid=p2.oid and p2.proname like '%%del')
	 AND
c2.relname='users' AND (if.indrelid=c.oid AND if.indexrelid=ic.oid and ic.oid=af.attrelid AND if.indisprimary) and (ip.indrelid=c2.oid and ip.indexrelid=ipc.oid and ipc.oid=ap.attrelid and ip.indisprimary) */ /** * * @param catalog * @param schema * @param primaryTable if provided will get the keys exported by this table * @param foreignTable if provided will get the keys imported by this table * @return ResultSet * @throws SQLException */ private java.sql.ResultSet getImportedExportedKeys(String catalog, String schema, String primaryTable, String foreignTable) throws SQLException { Field f[] = new Field[14]; f[0] = new Field(connection, "PKTABLE_CAT", iVarcharOid, 32); f[1] = new Field(connection, "PKTABLE_SCHEM", iVarcharOid, 32); f[2] = new Field(connection, "PKTABLE_NAME", iVarcharOid, 32); f[3] = new Field(connection, "PKCOLUMN_NAME", iVarcharOid, 32); f[4] = new Field(connection, "FKTABLE_CAT", iVarcharOid, 32); f[5] = new Field(connection, "FKTABLE_SCHEM", iVarcharOid, 32); f[6] = new Field(connection, "FKTABLE_NAME", iVarcharOid, 32); f[7] = new Field(connection, "FKCOLUMN_NAME", iVarcharOid, 32); f[8] = new Field(connection, "KEY_SEQ", iInt2Oid, 2); f[9] = new Field(connection, "UPDATE_RULE", iInt2Oid, 2); f[10] = new Field(connection, "DELETE_RULE", iInt2Oid, 2); f[11] = new Field(connection, "FK_NAME", iVarcharOid, 32); f[12] = new Field(connection, "PK_NAME", iVarcharOid, 32); f[13] = new Field(connection, "DEFERRABILITY", iInt2Oid, 2); java.sql.ResultSet rs = connection.ExecSQL( - "SELECT " + "SELECT distinct " + "c.relname as prelname, " + "c2.relname as frelname, " + "t.tgconstrname, " + "a.attnum as keyseq, " + "ic.relname as fkeyname, " + "t.tgdeferrable, " + "t.tginitdeferred, " + "t.tgnargs,t.tgargs, " + "p1.proname as updaterule, " + "p2.proname as deleterule " + "FROM " + "pg_trigger t, " + "pg_trigger t1, " + "pg_class c, " + "pg_class c2, " + "pg_class ic, " + "pg_proc p1, " + "pg_proc p2, " + "pg_index i, " + "pg_attribute a " + "WHERE " // 
isolate the update rule + "(t.tgrelid=c.oid " + "AND t.tgisconstraint " + "AND t.tgconstrrelid=c2.oid " + "AND t.tgfoid=p1.oid " + "and p1.proname like '%%upd') " + "and " // isolate the delete rule + "(t1.tgrelid=c.oid " + "and t1.tgisconstraint " + "and t1.tgconstrrelid=c2.oid " + "AND t1.tgfoid=p2.oid " + "and p2.proname like '%%del') " // if we are looking for exported keys then primary table will be used + ((primaryTable != null) ? "AND c.relname='" + primaryTable + "' " : "") // if we are looking for imported keys then the foreign table will be used + ((foreignTable != null) ? "AND c2.relname='" + foreignTable + "' " : "") + "AND i.indrelid=c.oid " + "AND i.indexrelid=ic.oid " + "AND ic.oid=a.attrelid " + "AND i.indisprimary " + "ORDER BY " // orderby is as follows getExported, orders by FKTABLE, // getImported orders by PKTABLE // getCrossReference orders by FKTABLE, so this should work for both, // since when getting crossreference, primaryTable will be defined + (primaryTable != null ? 
"frelname" : "prelname") + ",keyseq"); // returns the following columns // and some example data with a table defined as follows // create table people ( id int primary key); // create table policy ( id int primary key); // create table users ( id int primary key, people_id int references people(id), policy_id int references policy(id)) // prelname | frelname | tgconstrname | keyseq | fkeyName | tgdeferrable | tginitdeferred // 1 | 2 | 3 | 4 | 5 | 6 | 7 // people | users | <unnamed> | 1 | people_pkey | f | f // | tgnargs | tgargs | updaterule | deleterule // | 8 | 9 | 10 | 11 // | 6 | <unnamed>\000users\000people\000UNSPECIFIED\000people_id\000id\000 | RI_FKey_noaction_upd | RI_FKey_noaction_del Vector tuples = new Vector(); while ( rs.next() ) { byte tuple[][] = new byte[14][]; tuple[2] = rs.getBytes(1); //PKTABLE_NAME tuple[6] = rs.getBytes(2); //FKTABLE_NAME String fKeyName = rs.getString(3); String updateRule = rs.getString(10); if (updateRule != null ) { // Rules look like this RI_FKey_noaction_del so we want to pull out the part between the 'Key_' and the last '_' s String rule = updateRule.substring(8, updateRule.length() - 4); int action = importedKeyNoAction; if ( rule == null || "noaction".equals(rule) ) action = importedKeyNoAction; if ("cascade".equals(rule)) action = importedKeyCascade; else if ("setnull".equals(rule)) action = importedKeySetNull; else if ("setdefault".equals(rule)) action = importedKeySetDefault; else if ("restrict".equals(rule)) action = importedKeyRestrict; tuple[9] = Integer.toString(action).getBytes(); } String deleteRule = rs.getString(11); if ( deleteRule != null ) { String rule = updateRule.substring(8, updateRule.length() - 4); int action = importedKeyNoAction; if ("cascade".equals(rule)) action = importedKeyCascade; else if ("setnull".equals(rule)) action = importedKeySetNull; else if ("setdefault".equals(rule)) action = importedKeySetDefault; tuple[10] = Integer.toString(action).getBytes(); } // Parse the tgargs data 
StringBuffer fkeyColumns = new StringBuffer(); StringBuffer pkeyColumns = new StringBuffer(); // Note, I am guessing at most of this, but it should be close // if not, please correct // the keys are in pairs and start after the first four arguments // the arguments are seperated by \000 int numColumns = (rs.getInt(8) >> 1) - 2; // get the args String targs = rs.getString(9); // start parsing from the end int pos = targs.lastIndexOf("\\000"); for (int c = 0;c < numColumns;c++) { // this should never be, since we should never get to the beginning of the string // as the number of columns should override this, but it is a safe test if (pos > -1) { int pos2 = targs.lastIndexOf("\\000", pos - 1); if (pos2 > -1) { // seperate the pkColumns by ',' s if (pkeyColumns.length() > 0) pkeyColumns.insert(0, ','); // extract the column name out 4 characters ahead essentially removing the /000 pkeyColumns.insert(0, targs.substring(pos2 + 4, pos)); //PKCOLUMN_NAME // now find the associated fkColumn pos = targs.lastIndexOf("\\000", pos2 - 1); if (pos > -1) { if (fkeyColumns.length() > 0) fkeyColumns.insert(0, ','); fkeyColumns.insert(0, targs.substring(pos + 4, pos2)); //FKCOLUMN_NAME } } } } tuple[3] = pkeyColumns.toString().getBytes(); //PKCOLUMN_NAME tuple[7] = fkeyColumns.toString().getBytes(); //FKCOLUMN_NAME tuple[8] = rs.getBytes(4); //KEY_SEQ - tuple[11] = rs.getBytes(5); //FK_NAME - tuple[12] = rs.getBytes(3); //PK_NAME + tuple[11] = rs.getBytes(3); //FK_NAME + tuple[12] = rs.getBytes(5); //PK_NAME // DEFERRABILITY int deferrability = importedKeyNotDeferrable; boolean deferrable = rs.getBoolean(6); boolean initiallyDeferred = rs.getBoolean(7); if (deferrable) { if (initiallyDeferred) deferrability = importedKeyInitiallyDeferred; else deferrability = importedKeyInitiallyImmediate; } tuple[13] = Integer.toString(deferrability).getBytes(); tuples.addElement(tuple); } return new ResultSet(connection, f, tuples, "OK", 1); } /* * Get a description of the primary key columns that 
are * referenced by a table's foreign key columns (the primary keys * imported by a table). They are ordered by PKTABLE_CAT, * PKTABLE_SCHEM, PKTABLE_NAME, and KEY_SEQ. * * <P>Each primary key column description has the following columns: * <OL> * <LI><B>PKTABLE_CAT</B> String => primary key table catalog * being imported (may be null) * <LI><B>PKTABLE_SCHEM</B> String => primary key table schema * being imported (may be null) * <LI><B>PKTABLE_NAME</B> String => primary key table name * being imported * <LI><B>PKCOLUMN_NAME</B> String => primary key column name * being imported * <LI><B>FKTABLE_CAT</B> String => foreign key table catalog (may be null) * <LI><B>FKTABLE_SCHEM</B> String => foreign key table schema (may be null) * <LI><B>FKTABLE_NAME</B> String => foreign key table name * <LI><B>FKCOLUMN_NAME</B> String => foreign key column name * <LI><B>KEY_SEQ</B> short => sequence number within foreign key * <LI><B>UPDATE_RULE</B> short => What happens to * foreign key when primary is updated: * <UL> * <LI> importedKeyCascade - change imported key to agree * with primary key update * <LI> importedKeyRestrict - do not allow update of primary * key if it has been imported * <LI> importedKeySetNull - change imported key to NULL if * its primary key has been updated * </UL> * <LI><B>DELETE_RULE</B> short => What happens to * the foreign key when primary is deleted. 
* <UL> * <LI> importedKeyCascade - delete rows that import a deleted key * <LI> importedKeyRestrict - do not allow delete of primary * key if it has been imported * <LI> importedKeySetNull - change imported key to NULL if * its primary key has been deleted * </UL> * <LI><B>FK_NAME</B> String => foreign key name (may be null) * <LI><B>PK_NAME</B> String => primary key name (may be null) * </OL> * * @param catalog a catalog name; "" retrieves those without a catalog * @param schema a schema name pattern; "" retrieves those * without a schema * @param table a table name * @return ResultSet each row is a primary key column description * @see #getExportedKeys */ public java.sql.ResultSet getImportedKeys(String catalog, String schema, String table) throws SQLException { return getImportedExportedKeys(catalog, schema, null, table); } /* * Get a description of a foreign key columns that reference a * table's primary key columns (the foreign keys exported by a * table). They are ordered by FKTABLE_CAT, FKTABLE_SCHEM, * FKTABLE_NAME, and KEY_SEQ. * * This method is currently unimplemented. 
* * <P>Each foreign key column description has the following columns: * <OL> * <LI><B>PKTABLE_CAT</B> String => primary key table catalog (may be null) * <LI><B>PKTABLE_SCHEM</B> String => primary key table schema (may be null) * <LI><B>PKTABLE_NAME</B> String => primary key table name * <LI><B>PKCOLUMN_NAME</B> String => primary key column name * <LI><B>FKTABLE_CAT</B> String => foreign key table catalog (may be null) * being exported (may be null) * <LI><B>FKTABLE_SCHEM</B> String => foreign key table schema (may be null) * being exported (may be null) * <LI><B>FKTABLE_NAME</B> String => foreign key table name * being exported * <LI><B>FKCOLUMN_NAME</B> String => foreign key column name * being exported * <LI><B>KEY_SEQ</B> short => sequence number within foreign key * <LI><B>UPDATE_RULE</B> short => What happens to * foreign key when primary is updated: * <UL> * <LI> importedKeyCascade - change imported key to agree * with primary key update * <LI> importedKeyRestrict - do not allow update of primary * key if it has been imported * <LI> importedKeySetNull - change imported key to NULL if * its primary key has been updated * </UL> * <LI><B>DELETE_RULE</B> short => What happens to * the foreign key when primary is deleted. 
* <UL> * <LI> importedKeyCascade - delete rows that import a deleted key * <LI> importedKeyRestrict - do not allow delete of primary * key if it has been imported * <LI> importedKeySetNull - change imported key to NULL if * its primary key has been deleted * </UL> * <LI><B>FK_NAME</B> String => foreign key identifier (may be null) * <LI><B>PK_NAME</B> String => primary key identifier (may be null) * </OL> * * @param catalog a catalog name; "" retrieves those without a catalog * @param schema a schema name pattern; "" retrieves those * without a schema * @param table a table name * @return ResultSet each row is a foreign key column description * @see #getImportedKeys */ public java.sql.ResultSet getExportedKeys(String catalog, String schema, String table) throws SQLException { return getImportedExportedKeys(catalog, schema, table, null); } /* * Get a description of the foreign key columns in the foreign key * table that reference the primary key columns of the primary key * table (describe how one table imports another's key.) This * should normally return a single foreign key/primary key pair * (most tables only import a foreign key from a table once.) They * are ordered by FKTABLE_CAT, FKTABLE_SCHEM, FKTABLE_NAME, and * KEY_SEQ. * * This method is currently unimplemented. 
* * <P>Each foreign key column description has the following columns: * <OL> * <LI><B>PKTABLE_CAT</B> String => primary key table catalog (may be null) * <LI><B>PKTABLE_SCHEM</B> String => primary key table schema (may be null) * <LI><B>PKTABLE_NAME</B> String => primary key table name * <LI><B>PKCOLUMN_NAME</B> String => primary key column name * <LI><B>FKTABLE_CAT</B> String => foreign key table catalog (may be null) * being exported (may be null) * <LI><B>FKTABLE_SCHEM</B> String => foreign key table schema (may be null) * being exported (may be null) * <LI><B>FKTABLE_NAME</B> String => foreign key table name * being exported * <LI><B>FKCOLUMN_NAME</B> String => foreign key column name * being exported * <LI><B>KEY_SEQ</B> short => sequence number within foreign key * <LI><B>UPDATE_RULE</B> short => What happens to * foreign key when primary is updated: * <UL> * <LI> importedKeyCascade - change imported key to agree * with primary key update * <LI> importedKeyRestrict - do not allow update of primary * key if it has been imported * <LI> importedKeySetNull - change imported key to NULL if * its primary key has been updated * </UL> * <LI><B>DELETE_RULE</B> short => What happens to * the foreign key when primary is deleted. 
* <UL> * <LI> importedKeyCascade - delete rows that import a deleted key * <LI> importedKeyRestrict - do not allow delete of primary * key if it has been imported * <LI> importedKeySetNull - change imported key to NULL if * its primary key has been deleted * </UL> * <LI><B>FK_NAME</B> String => foreign key identifier (may be null) * <LI><B>PK_NAME</B> String => primary key identifier (may be null) * </OL> * * @param catalog a catalog name; "" retrieves those without a catalog * @param schema a schema name pattern; "" retrieves those * without a schema * @param table a table name * @return ResultSet each row is a foreign key column description * @see #getImportedKeys */ public java.sql.ResultSet getCrossReference(String primaryCatalog, String primarySchema, String primaryTable, String foreignCatalog, String foreignSchema, String foreignTable) throws SQLException { return getImportedExportedKeys(primaryCatalog, primarySchema, primaryTable, foreignTable); } /* * Get a description of all the standard SQL types supported by * this database. They are ordered by DATA_TYPE and then by how * closely the data type maps to the corresponding JDBC SQL type. * * <P>Each type description has the following columns: * <OL> * <LI><B>TYPE_NAME</B> String => Type name * <LI><B>DATA_TYPE</B> short => SQL data type from java.sql.Types * <LI><B>PRECISION</B> int => maximum precision * <LI><B>LITERAL_PREFIX</B> String => prefix used to quote a literal * (may be null) * <LI><B>LITERAL_SUFFIX</B> String => suffix used to quote a literal (may be null) * <LI><B>CREATE_PARAMS</B> String => parameters used in creating * the type (may be null) * <LI><B>NULLABLE</B> short => can you use NULL for this type? * <UL> * <LI> typeNoNulls - does not allow NULL values * <LI> typeNullable - allows NULL values * <LI> typeNullableUnknown - nullability unknown * </UL> * <LI><B>CASE_SENSITIVE</B> boolean=> is it case sensitive? 
* <LI><B>SEARCHABLE</B> short => can you use "WHERE" based on this type: * <UL> * <LI> typePredNone - No support * <LI> typePredChar - Only supported with WHERE .. LIKE * <LI> typePredBasic - Supported except for WHERE .. LIKE * <LI> typeSearchable - Supported for all WHERE .. * </UL> * <LI><B>UNSIGNED_ATTRIBUTE</B> boolean => is it unsigned? * <LI><B>FIXED_PREC_SCALE</B> boolean => can it be a money value? * <LI><B>AUTO_INCREMENT</B> boolean => can it be used for an * auto-increment value? * <LI><B>LOCAL_TYPE_NAME</B> String => localized version of type name * (may be null) * <LI><B>MINIMUM_SCALE</B> short => minimum scale supported * <LI><B>MAXIMUM_SCALE</B> short => maximum scale supported * <LI><B>SQL_DATA_TYPE</B> int => unused * <LI><B>SQL_DATETIME_SUB</B> int => unused * <LI><B>NUM_PREC_RADIX</B> int => usually 2 or 10 * </OL> * * @return ResultSet each row is a SQL type description */ public java.sql.ResultSet getTypeInfo() throws SQLException { java.sql.ResultSet rs = connection.ExecSQL("select typname from pg_type"); if (rs != null) { Field f[] = new Field[18]; ResultSet r; // ResultSet for the SQL query that we need to do Vector v = new Vector(); // The new ResultSet tuple stuff f[0] = new Field(connection, "TYPE_NAME", iVarcharOid, 32); f[1] = new Field(connection, "DATA_TYPE", iInt2Oid, 2); f[2] = new Field(connection, "PRECISION", iInt4Oid, 4); f[3] = new Field(connection, "LITERAL_PREFIX", iVarcharOid, 32); f[4] = new Field(connection, "LITERAL_SUFFIX", iVarcharOid, 32); f[5] = new Field(connection, "CREATE_PARAMS", iVarcharOid, 32); f[6] = new Field(connection, "NULLABLE", iInt2Oid, 2); f[7] = new Field(connection, "CASE_SENSITIVE", iBoolOid, 1); f[8] = new Field(connection, "SEARCHABLE", iInt2Oid, 2); f[9] = new Field(connection, "UNSIGNED_ATTRIBUTE", iBoolOid, 1); f[10] = new Field(connection, "FIXED_PREC_SCALE", iBoolOid, 1); f[11] = new Field(connection, "AUTO_INCREMENT", iBoolOid, 1); f[12] = new Field(connection, "LOCAL_TYPE_NAME", 
iVarcharOid, 32); f[13] = new Field(connection, "MINIMUM_SCALE", iInt2Oid, 2); f[14] = new Field(connection, "MAXIMUM_SCALE", iInt2Oid, 2); f[15] = new Field(connection, "SQL_DATA_TYPE", iInt4Oid, 4); f[16] = new Field(connection, "SQL_DATETIME_SUB", iInt4Oid, 4); f[17] = new Field(connection, "NUM_PREC_RADIX", iInt4Oid, 4); // cache some results, this will keep memory useage down, and speed // things up a little. byte b9[] = "9".getBytes(); byte b10[] = "10".getBytes(); byte bf[] = "f".getBytes(); byte bnn[] = Integer.toString(typeNoNulls).getBytes(); byte bts[] = Integer.toString(typeSearchable).getBytes(); while (rs.next()) { byte[][] tuple = new byte[18][]; String typname = rs.getString(1); tuple[0] = typname.getBytes(); tuple[1] = Integer.toString(connection.getSQLType(typname)).getBytes(); tuple[2] = b9; // for now tuple[6] = bnn; // for now tuple[7] = bf; // false for now - not case sensitive tuple[8] = bts; tuple[9] = bf; // false for now - it's signed tuple[10] = bf; // false for now - must handle money tuple[11] = bf; // false for now - handle autoincrement // 12 - LOCAL_TYPE_NAME is null // 13 & 14 ? // 15 & 16 are unused so we return null tuple[17] = b10; // everything is base 10 v.addElement(tuple); } rs.close(); return new ResultSet(connection, f, v, "OK", 1); } throw new PSQLException("postgresql.metadata.unavailable"); } /* * Get a description of a table's indices and statistics. They are * ordered by NON_UNIQUE, TYPE, INDEX_NAME, and ORDINAL_POSITION. * * <P>Each index column description has the following columns: * <OL> * <LI><B>TABLE_CAT</B> String => table catalog (may be null) * <LI><B>TABLE_SCHEM</B> String => table schema (may be null) * <LI><B>TABLE_NAME</B> String => table name * <LI><B>NON_UNIQUE</B> boolean => Can index values be non-unique? 
* false when TYPE is tableIndexStatistic * <LI><B>INDEX_QUALIFIER</B> String => index catalog (may be null); * null when TYPE is tableIndexStatistic * <LI><B>INDEX_NAME</B> String => index name; null when TYPE is * tableIndexStatistic * <LI><B>TYPE</B> short => index type: * <UL> * <LI> tableIndexStatistic - this identifies table statistics that are * returned in conjuction with a table's index descriptions * <LI> tableIndexClustered - this is a clustered index * <LI> tableIndexHashed - this is a hashed index * <LI> tableIndexOther - this is some other style of index * </UL> * <LI><B>ORDINAL_POSITION</B> short => column sequence number * within index; zero when TYPE is tableIndexStatistic * <LI><B>COLUMN_NAME</B> String => column name; null when TYPE is * tableIndexStatistic * <LI><B>ASC_OR_DESC</B> String => column sort sequence, "A" => ascending * "D" => descending, may be null if sort sequence is not supported; * null when TYPE is tableIndexStatistic * <LI><B>CARDINALITY</B> int => When TYPE is tableIndexStatisic then * this is the number of rows in the table; otherwise it is the * number of unique values in the index. * <LI><B>PAGES</B> int => When TYPE is tableIndexStatisic then * this is the number of pages used for the table, otherwise it * is the number of pages used for the current index. * <LI><B>FILTER_CONDITION</B> String => Filter condition, if any. 
* (may be null) * </OL> * * @param catalog a catalog name; "" retrieves those without a catalog * @param schema a schema name pattern; "" retrieves those without a schema * @param table a table name * @param unique when true, return only indices for unique values; * when false, return indices regardless of whether unique or not * @param approximate when true, result is allowed to reflect approximate * or out of data values; when false, results are requested to be * accurate * @return ResultSet each row is an index column description */ // Implementation note: This is required for Borland's JBuilder to work public java.sql.ResultSet getIndexInfo(String catalog, String schema, String tableName, boolean unique, boolean approximate) throws SQLException { Field f[] = new Field[13]; java.sql.ResultSet r; // ResultSet for the SQL query that we need to do Vector v = new Vector(); // The new ResultSet tuple stuff f[0] = new Field(connection, "TABLE_CAT", iVarcharOid, 32); f[1] = new Field(connection, "TABLE_SCHEM", iVarcharOid, 32); f[2] = new Field(connection, "TABLE_NAME", iVarcharOid, 32); f[3] = new Field(connection, "NON_UNIQUE", iBoolOid, 1); f[4] = new Field(connection, "INDEX_QUALIFIER", iVarcharOid, 32); f[5] = new Field(connection, "INDEX_NAME", iVarcharOid, 32); f[6] = new Field(connection, "TYPE", iInt2Oid, 2); f[7] = new Field(connection, "ORDINAL_POSITION", iInt2Oid, 2); f[8] = new Field(connection, "COLUMN_NAME", iVarcharOid, 32); f[9] = new Field(connection, "ASC_OR_DESC", iVarcharOid, 32); f[10] = new Field(connection, "CARDINALITY", iInt4Oid, 4); f[11] = new Field(connection, "PAGES", iInt4Oid, 4); f[12] = new Field(connection, "FILTER_CONDITION", iVarcharOid, 32); r = connection.ExecSQL("select " + "c.relname, " + "x.indisunique, " + "i.relname, " + "x.indisclustered, " + "a.amname, " + "x.indkey, " + "c.reltuples, " + "c.relpages, " + "x.indexrelid " + "FROM pg_index x, pg_class c, pg_class i, pg_am a " + "WHERE ((c.relname = '" + tableName.toLowerCase() 
+ "') " + " AND (c.oid = x.indrelid) " + " AND (i.oid = x.indexrelid) " + " AND (i.relam = a.oid)) " + "ORDER BY x.indisunique DESC, " + " x.indisclustered, a.amname, i.relname"); while (r.next()) { // indkey is an array of column ordinals (integers). In the JDBC // interface, this has to be separated out into a separate // tuple for each indexed column. Also, getArray() is not yet // implemented for Postgres JDBC, so we parse by hand. String columnOrdinalString = r.getString(6); StringTokenizer stok = new StringTokenizer(columnOrdinalString); int [] columnOrdinals = new int[stok.countTokens()]; int o = 0; while (stok.hasMoreTokens()) { columnOrdinals[o++] = Integer.parseInt(stok.nextToken()); } java.sql.ResultSet columnNameRS = connection.ExecSQL("select a.attname FROM pg_attribute a WHERE a.attrelid = " + r.getInt(9)); for (int i = 0; i < columnOrdinals.length; i++) { byte [] [] tuple = new byte [13] []; tuple[0] = "".getBytes(); tuple[1] = "".getBytes(); tuple[2] = r.getBytes(1); tuple[3] = r.getBoolean(2) ? "false".getBytes() : "true".getBytes(); tuple[4] = null; tuple[5] = r.getBytes(3); tuple[6] = r.getBoolean(4) ? Integer.toString(tableIndexClustered).getBytes() : r.getString(5).equals("hash") ? Integer.toString(tableIndexHashed).getBytes() : Integer.toString(tableIndexOther).getBytes(); tuple[7] = Integer.toString(i + 1).getBytes(); if (columnNameRS.next()) tuple[8] = columnNameRS.getBytes(1); else tuple[8] = "".getBytes(); tuple[9] = null; // sort sequence ??? tuple[10] = r.getBytes(7); // inexact tuple[11] = r.getBytes(8); tuple[12] = null; v.addElement(tuple); } } return new ResultSet(connection, f, v, "OK", 1); } // ** JDBC 2 Extensions ** /* * Does the database support the given result set type? 
* * @param type - defined in java.sql.ResultSet * @return true if so; false otherwise * @exception SQLException - if a database access error occurs */ public boolean supportsResultSetType(int type) throws SQLException { // The only type we don't support return type != java.sql.ResultSet.TYPE_SCROLL_SENSITIVE; } /* * Does the database support the concurrency type in combination * with the given result set type? * * @param type - defined in java.sql.ResultSet * @param concurrency - type defined in java.sql.ResultSet * @return true if so; false otherwise * @exception SQLException - if a database access error occurs */ public boolean supportsResultSetConcurrency(int type, int concurrency) throws SQLException { // These combinations are not supported! if (type == java.sql.ResultSet.TYPE_SCROLL_SENSITIVE) return false; // We don't yet support Updateable ResultSets if (concurrency == java.sql.ResultSet.CONCUR_UPDATABLE) return false; // Everything else we do return true; } /* lots of unsupported stuff... */ public boolean ownUpdatesAreVisible(int type) throws SQLException { return false; } public boolean ownDeletesAreVisible(int type) throws SQLException { return false; } public boolean ownInsertsAreVisible(int type) throws SQLException { return false; } public boolean othersUpdatesAreVisible(int type) throws SQLException { return false; } public boolean othersDeletesAreVisible(int i) throws SQLException { return false; } public boolean othersInsertsAreVisible(int type) throws SQLException { return false; } public boolean updatesAreDetected(int type) throws SQLException { return false; } public boolean deletesAreDetected(int i) throws SQLException { return false; } public boolean insertsAreDetected(int type) throws SQLException { return false; } /* * Indicates whether the driver supports batch updates. 
*/ public boolean supportsBatchUpdates() throws SQLException { return true; } /* * Return user defined types in a schema */ public java.sql.ResultSet getUDTs(String catalog, String schemaPattern, String typeNamePattern, int[] types ) throws SQLException { throw org.postgresql.Driver.notImplemented(); } /* * Retrieves the connection that produced this metadata object. * * @return the connection that produced this metadata object */ public java.sql.Connection getConnection() throws SQLException { return (java.sql.Connection)connection; } /* I don't find these in the spec!?! */ public boolean rowChangesAreDetected(int type) throws SQLException { return false; } public boolean rowChangesAreVisible(int type) throws SQLException { return false; } }
false
true
private java.sql.ResultSet getImportedExportedKeys(String catalog, String schema, String primaryTable, String foreignTable) throws SQLException { Field f[] = new Field[14]; f[0] = new Field(connection, "PKTABLE_CAT", iVarcharOid, 32); f[1] = new Field(connection, "PKTABLE_SCHEM", iVarcharOid, 32); f[2] = new Field(connection, "PKTABLE_NAME", iVarcharOid, 32); f[3] = new Field(connection, "PKCOLUMN_NAME", iVarcharOid, 32); f[4] = new Field(connection, "FKTABLE_CAT", iVarcharOid, 32); f[5] = new Field(connection, "FKTABLE_SCHEM", iVarcharOid, 32); f[6] = new Field(connection, "FKTABLE_NAME", iVarcharOid, 32); f[7] = new Field(connection, "FKCOLUMN_NAME", iVarcharOid, 32); f[8] = new Field(connection, "KEY_SEQ", iInt2Oid, 2); f[9] = new Field(connection, "UPDATE_RULE", iInt2Oid, 2); f[10] = new Field(connection, "DELETE_RULE", iInt2Oid, 2); f[11] = new Field(connection, "FK_NAME", iVarcharOid, 32); f[12] = new Field(connection, "PK_NAME", iVarcharOid, 32); f[13] = new Field(connection, "DEFERRABILITY", iInt2Oid, 2); java.sql.ResultSet rs = connection.ExecSQL( "SELECT " + "c.relname as prelname, " + "c2.relname as frelname, " + "t.tgconstrname, " + "a.attnum as keyseq, " + "ic.relname as fkeyname, " + "t.tgdeferrable, " + "t.tginitdeferred, " + "t.tgnargs,t.tgargs, " + "p1.proname as updaterule, " + "p2.proname as deleterule " + "FROM " + "pg_trigger t, " + "pg_trigger t1, " + "pg_class c, " + "pg_class c2, " + "pg_class ic, " + "pg_proc p1, " + "pg_proc p2, " + "pg_index i, " + "pg_attribute a " + "WHERE " // isolate the update rule + "(t.tgrelid=c.oid " + "AND t.tgisconstraint " + "AND t.tgconstrrelid=c2.oid " + "AND t.tgfoid=p1.oid " + "and p1.proname like '%%upd') " + "and " // isolate the delete rule + "(t1.tgrelid=c.oid " + "and t1.tgisconstraint " + "and t1.tgconstrrelid=c2.oid " + "AND t1.tgfoid=p2.oid " + "and p2.proname like '%%del') " // if we are looking for exported keys then primary table will be used + ((primaryTable != null) ? 
"AND c.relname='" + primaryTable + "' " : "") // if we are looking for imported keys then the foreign table will be used + ((foreignTable != null) ? "AND c2.relname='" + foreignTable + "' " : "") + "AND i.indrelid=c.oid " + "AND i.indexrelid=ic.oid " + "AND ic.oid=a.attrelid " + "AND i.indisprimary " + "ORDER BY " // orderby is as follows getExported, orders by FKTABLE, // getImported orders by PKTABLE // getCrossReference orders by FKTABLE, so this should work for both, // since when getting crossreference, primaryTable will be defined + (primaryTable != null ? "frelname" : "prelname") + ",keyseq"); // returns the following columns // and some example data with a table defined as follows // create table people ( id int primary key); // create table policy ( id int primary key); // create table users ( id int primary key, people_id int references people(id), policy_id int references policy(id)) // prelname | frelname | tgconstrname | keyseq | fkeyName | tgdeferrable | tginitdeferred // 1 | 2 | 3 | 4 | 5 | 6 | 7 // people | users | <unnamed> | 1 | people_pkey | f | f // | tgnargs | tgargs | updaterule | deleterule // | 8 | 9 | 10 | 11 // | 6 | <unnamed>\000users\000people\000UNSPECIFIED\000people_id\000id\000 | RI_FKey_noaction_upd | RI_FKey_noaction_del Vector tuples = new Vector(); while ( rs.next() ) { byte tuple[][] = new byte[14][]; tuple[2] = rs.getBytes(1); //PKTABLE_NAME tuple[6] = rs.getBytes(2); //FKTABLE_NAME String fKeyName = rs.getString(3); String updateRule = rs.getString(10); if (updateRule != null ) { // Rules look like this RI_FKey_noaction_del so we want to pull out the part between the 'Key_' and the last '_' s String rule = updateRule.substring(8, updateRule.length() - 4); int action = importedKeyNoAction; if ( rule == null || "noaction".equals(rule) ) action = importedKeyNoAction; if ("cascade".equals(rule)) action = importedKeyCascade; else if ("setnull".equals(rule)) action = importedKeySetNull; else if ("setdefault".equals(rule)) action = 
importedKeySetDefault; else if ("restrict".equals(rule)) action = importedKeyRestrict; tuple[9] = Integer.toString(action).getBytes(); } String deleteRule = rs.getString(11); if ( deleteRule != null ) { String rule = updateRule.substring(8, updateRule.length() - 4); int action = importedKeyNoAction; if ("cascade".equals(rule)) action = importedKeyCascade; else if ("setnull".equals(rule)) action = importedKeySetNull; else if ("setdefault".equals(rule)) action = importedKeySetDefault; tuple[10] = Integer.toString(action).getBytes(); } // Parse the tgargs data StringBuffer fkeyColumns = new StringBuffer(); StringBuffer pkeyColumns = new StringBuffer(); // Note, I am guessing at most of this, but it should be close // if not, please correct // the keys are in pairs and start after the first four arguments // the arguments are seperated by \000 int numColumns = (rs.getInt(8) >> 1) - 2; // get the args String targs = rs.getString(9); // start parsing from the end int pos = targs.lastIndexOf("\\000"); for (int c = 0;c < numColumns;c++) { // this should never be, since we should never get to the beginning of the string // as the number of columns should override this, but it is a safe test if (pos > -1) { int pos2 = targs.lastIndexOf("\\000", pos - 1); if (pos2 > -1) { // seperate the pkColumns by ',' s if (pkeyColumns.length() > 0) pkeyColumns.insert(0, ','); // extract the column name out 4 characters ahead essentially removing the /000 pkeyColumns.insert(0, targs.substring(pos2 + 4, pos)); //PKCOLUMN_NAME // now find the associated fkColumn pos = targs.lastIndexOf("\\000", pos2 - 1); if (pos > -1) { if (fkeyColumns.length() > 0) fkeyColumns.insert(0, ','); fkeyColumns.insert(0, targs.substring(pos + 4, pos2)); //FKCOLUMN_NAME } } } } tuple[3] = pkeyColumns.toString().getBytes(); //PKCOLUMN_NAME tuple[7] = fkeyColumns.toString().getBytes(); //FKCOLUMN_NAME tuple[8] = rs.getBytes(4); //KEY_SEQ tuple[11] = rs.getBytes(5); //FK_NAME tuple[12] = rs.getBytes(3); //PK_NAME // 
DEFERRABILITY int deferrability = importedKeyNotDeferrable; boolean deferrable = rs.getBoolean(6); boolean initiallyDeferred = rs.getBoolean(7); if (deferrable) { if (initiallyDeferred) deferrability = importedKeyInitiallyDeferred; else deferrability = importedKeyInitiallyImmediate; } tuple[13] = Integer.toString(deferrability).getBytes(); tuples.addElement(tuple); } return new ResultSet(connection, f, tuples, "OK", 1); }
private java.sql.ResultSet getImportedExportedKeys(String catalog, String schema, String primaryTable, String foreignTable) throws SQLException { Field f[] = new Field[14]; f[0] = new Field(connection, "PKTABLE_CAT", iVarcharOid, 32); f[1] = new Field(connection, "PKTABLE_SCHEM", iVarcharOid, 32); f[2] = new Field(connection, "PKTABLE_NAME", iVarcharOid, 32); f[3] = new Field(connection, "PKCOLUMN_NAME", iVarcharOid, 32); f[4] = new Field(connection, "FKTABLE_CAT", iVarcharOid, 32); f[5] = new Field(connection, "FKTABLE_SCHEM", iVarcharOid, 32); f[6] = new Field(connection, "FKTABLE_NAME", iVarcharOid, 32); f[7] = new Field(connection, "FKCOLUMN_NAME", iVarcharOid, 32); f[8] = new Field(connection, "KEY_SEQ", iInt2Oid, 2); f[9] = new Field(connection, "UPDATE_RULE", iInt2Oid, 2); f[10] = new Field(connection, "DELETE_RULE", iInt2Oid, 2); f[11] = new Field(connection, "FK_NAME", iVarcharOid, 32); f[12] = new Field(connection, "PK_NAME", iVarcharOid, 32); f[13] = new Field(connection, "DEFERRABILITY", iInt2Oid, 2); java.sql.ResultSet rs = connection.ExecSQL( "SELECT distinct " + "c.relname as prelname, " + "c2.relname as frelname, " + "t.tgconstrname, " + "a.attnum as keyseq, " + "ic.relname as fkeyname, " + "t.tgdeferrable, " + "t.tginitdeferred, " + "t.tgnargs,t.tgargs, " + "p1.proname as updaterule, " + "p2.proname as deleterule " + "FROM " + "pg_trigger t, " + "pg_trigger t1, " + "pg_class c, " + "pg_class c2, " + "pg_class ic, " + "pg_proc p1, " + "pg_proc p2, " + "pg_index i, " + "pg_attribute a " + "WHERE " // isolate the update rule + "(t.tgrelid=c.oid " + "AND t.tgisconstraint " + "AND t.tgconstrrelid=c2.oid " + "AND t.tgfoid=p1.oid " + "and p1.proname like '%%upd') " + "and " // isolate the delete rule + "(t1.tgrelid=c.oid " + "and t1.tgisconstraint " + "and t1.tgconstrrelid=c2.oid " + "AND t1.tgfoid=p2.oid " + "and p2.proname like '%%del') " // if we are looking for exported keys then primary table will be used + ((primaryTable != null) ? 
"AND c.relname='" + primaryTable + "' " : "") // if we are looking for imported keys then the foreign table will be used + ((foreignTable != null) ? "AND c2.relname='" + foreignTable + "' " : "") + "AND i.indrelid=c.oid " + "AND i.indexrelid=ic.oid " + "AND ic.oid=a.attrelid " + "AND i.indisprimary " + "ORDER BY " // orderby is as follows getExported, orders by FKTABLE, // getImported orders by PKTABLE // getCrossReference orders by FKTABLE, so this should work for both, // since when getting crossreference, primaryTable will be defined + (primaryTable != null ? "frelname" : "prelname") + ",keyseq"); // returns the following columns // and some example data with a table defined as follows // create table people ( id int primary key); // create table policy ( id int primary key); // create table users ( id int primary key, people_id int references people(id), policy_id int references policy(id)) // prelname | frelname | tgconstrname | keyseq | fkeyName | tgdeferrable | tginitdeferred // 1 | 2 | 3 | 4 | 5 | 6 | 7 // people | users | <unnamed> | 1 | people_pkey | f | f // | tgnargs | tgargs | updaterule | deleterule // | 8 | 9 | 10 | 11 // | 6 | <unnamed>\000users\000people\000UNSPECIFIED\000people_id\000id\000 | RI_FKey_noaction_upd | RI_FKey_noaction_del Vector tuples = new Vector(); while ( rs.next() ) { byte tuple[][] = new byte[14][]; tuple[2] = rs.getBytes(1); //PKTABLE_NAME tuple[6] = rs.getBytes(2); //FKTABLE_NAME String fKeyName = rs.getString(3); String updateRule = rs.getString(10); if (updateRule != null ) { // Rules look like this RI_FKey_noaction_del so we want to pull out the part between the 'Key_' and the last '_' s String rule = updateRule.substring(8, updateRule.length() - 4); int action = importedKeyNoAction; if ( rule == null || "noaction".equals(rule) ) action = importedKeyNoAction; if ("cascade".equals(rule)) action = importedKeyCascade; else if ("setnull".equals(rule)) action = importedKeySetNull; else if ("setdefault".equals(rule)) action = 
importedKeySetDefault; else if ("restrict".equals(rule)) action = importedKeyRestrict; tuple[9] = Integer.toString(action).getBytes(); } String deleteRule = rs.getString(11); if ( deleteRule != null ) { String rule = updateRule.substring(8, updateRule.length() - 4); int action = importedKeyNoAction; if ("cascade".equals(rule)) action = importedKeyCascade; else if ("setnull".equals(rule)) action = importedKeySetNull; else if ("setdefault".equals(rule)) action = importedKeySetDefault; tuple[10] = Integer.toString(action).getBytes(); } // Parse the tgargs data StringBuffer fkeyColumns = new StringBuffer(); StringBuffer pkeyColumns = new StringBuffer(); // Note, I am guessing at most of this, but it should be close // if not, please correct // the keys are in pairs and start after the first four arguments // the arguments are seperated by \000 int numColumns = (rs.getInt(8) >> 1) - 2; // get the args String targs = rs.getString(9); // start parsing from the end int pos = targs.lastIndexOf("\\000"); for (int c = 0;c < numColumns;c++) { // this should never be, since we should never get to the beginning of the string // as the number of columns should override this, but it is a safe test if (pos > -1) { int pos2 = targs.lastIndexOf("\\000", pos - 1); if (pos2 > -1) { // seperate the pkColumns by ',' s if (pkeyColumns.length() > 0) pkeyColumns.insert(0, ','); // extract the column name out 4 characters ahead essentially removing the /000 pkeyColumns.insert(0, targs.substring(pos2 + 4, pos)); //PKCOLUMN_NAME // now find the associated fkColumn pos = targs.lastIndexOf("\\000", pos2 - 1); if (pos > -1) { if (fkeyColumns.length() > 0) fkeyColumns.insert(0, ','); fkeyColumns.insert(0, targs.substring(pos + 4, pos2)); //FKCOLUMN_NAME } } } } tuple[3] = pkeyColumns.toString().getBytes(); //PKCOLUMN_NAME tuple[7] = fkeyColumns.toString().getBytes(); //FKCOLUMN_NAME tuple[8] = rs.getBytes(4); //KEY_SEQ tuple[11] = rs.getBytes(3); //FK_NAME tuple[12] = rs.getBytes(5); //PK_NAME // 
DEFERRABILITY int deferrability = importedKeyNotDeferrable; boolean deferrable = rs.getBoolean(6); boolean initiallyDeferred = rs.getBoolean(7); if (deferrable) { if (initiallyDeferred) deferrability = importedKeyInitiallyDeferred; else deferrability = importedKeyInitiallyImmediate; } tuple[13] = Integer.toString(deferrability).getBytes(); tuples.addElement(tuple); } return new ResultSet(connection, f, tuples, "OK", 1); }
diff --git a/apvs/src/main/java/ch/cern/atlas/apvs/client/APVS.java b/apvs/src/main/java/ch/cern/atlas/apvs/client/APVS.java index 5f707ced..3ca0c921 100644 --- a/apvs/src/main/java/ch/cern/atlas/apvs/client/APVS.java +++ b/apvs/src/main/java/ch/cern/atlas/apvs/client/APVS.java @@ -1,445 +1,448 @@ package ch.cern.atlas.apvs.client; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ch.cern.atlas.apvs.client.domain.Ternary; import ch.cern.atlas.apvs.client.event.ConnectionStatusChangedRemoteEvent; import ch.cern.atlas.apvs.client.event.ConnectionStatusChangedRemoteEvent.ConnectionType; import ch.cern.atlas.apvs.client.event.SelectPtuEvent; import ch.cern.atlas.apvs.client.settings.SettingsPersister; import ch.cern.atlas.apvs.client.tablet.AppBundle; import ch.cern.atlas.apvs.client.tablet.HomePlace; import ch.cern.atlas.apvs.client.tablet.LocalStorage; import ch.cern.atlas.apvs.client.tablet.TabletHistoryObserver; import ch.cern.atlas.apvs.client.tablet.TabletMenuActivityMapper; import ch.cern.atlas.apvs.client.tablet.TabletMenuAnimationMapper; import ch.cern.atlas.apvs.client.tablet.TabletPanelActivityMapper; import ch.cern.atlas.apvs.client.tablet.TabletPanelAnimationMapper; import ch.cern.atlas.apvs.client.tablet.TabletPlaceHistoryMapper; import ch.cern.atlas.apvs.client.ui.AlarmView; import ch.cern.atlas.apvs.client.ui.Arguments; import ch.cern.atlas.apvs.client.ui.AudioSummary; import ch.cern.atlas.apvs.client.ui.AudioSupervisorSettingsView; import ch.cern.atlas.apvs.client.ui.AudioView; import ch.cern.atlas.apvs.client.ui.CameraTable; import ch.cern.atlas.apvs.client.ui.CameraView; import ch.cern.atlas.apvs.client.ui.EventView; import ch.cern.atlas.apvs.client.ui.GeneralInfoView; import ch.cern.atlas.apvs.client.ui.InterventionView; import ch.cern.atlas.apvs.client.ui.MeasurementTable; import ch.cern.atlas.apvs.client.ui.MeasurementView; import ch.cern.atlas.apvs.client.ui.Module; import ch.cern.atlas.apvs.client.ui.PlaceView; import 
ch.cern.atlas.apvs.client.ui.ProcedureControls; import ch.cern.atlas.apvs.client.ui.ProcedureView; import ch.cern.atlas.apvs.client.ui.PtuSettingsView; import ch.cern.atlas.apvs.client.ui.PtuTabSelector; import ch.cern.atlas.apvs.client.ui.PtuView; import ch.cern.atlas.apvs.client.ui.ServerSettingsView; import ch.cern.atlas.apvs.client.ui.Tab; import ch.cern.atlas.apvs.client.ui.TimeView; import ch.cern.atlas.apvs.client.widget.DialogResultEvent; import ch.cern.atlas.apvs.client.widget.DialogResultHandler; import ch.cern.atlas.apvs.client.widget.PasswordDialog; import ch.cern.atlas.apvs.eventbus.shared.RemoteEventBus; import ch.cern.atlas.apvs.eventbus.shared.RequestRemoteEvent; import com.google.gwt.activity.shared.ActivityMapper; import com.google.gwt.core.client.EntryPoint; import com.google.gwt.core.client.GWT; import com.google.gwt.core.client.Scheduler; import com.google.gwt.core.client.Scheduler.RepeatingCommand; import com.google.gwt.dom.client.Document; import com.google.gwt.dom.client.Element; import com.google.gwt.dom.client.NodeList; import com.google.gwt.dom.client.StyleInjector; import com.google.gwt.http.client.RequestBuilder; import com.google.gwt.http.client.RequestException; import com.google.gwt.place.shared.PlaceController; import com.google.gwt.user.client.Window; import com.google.gwt.user.client.rpc.AsyncCallback; import com.google.gwt.user.client.ui.IsWidget; import com.google.gwt.user.client.ui.Label; import com.google.gwt.user.client.ui.PopupPanel.PositionCallback; import com.google.gwt.user.client.ui.RootPanel; import com.google.gwt.user.client.ui.SimplePanel; import com.googlecode.mgwt.mvp.client.AnimatableDisplay; import com.googlecode.mgwt.mvp.client.AnimatingActivityManager; import com.googlecode.mgwt.mvp.client.AnimationMapper; import com.googlecode.mgwt.mvp.client.history.MGWTPlaceHistoryHandler; import com.googlecode.mgwt.ui.client.MGWT; import com.googlecode.mgwt.ui.client.MGWTSettings; import 
com.googlecode.mgwt.ui.client.MGWTSettings.ViewPort; import com.googlecode.mgwt.ui.client.MGWTSettings.ViewPort.DENSITY; import com.googlecode.mgwt.ui.client.dialog.TabletPortraitOverlay; import com.googlecode.mgwt.ui.client.layout.MasterRegionHandler; import com.googlecode.mgwt.ui.client.layout.OrientationRegionHandler; /** * @author Mark Donszelmann */ public class APVS implements EntryPoint { private Logger log = LoggerFactory.getLogger(getClass().getName()); @SuppressWarnings("unused") private Window screen; private RemoteEventBus remoteEventBus; @SuppressWarnings("unused") private PlaceController placeController; @SuppressWarnings("unused") private SettingsPersister settingsPersister; private String defaultPtuId = "PTUdemo"; private ClientFactory clientFactory; private Ternary alive = Ternary.Unknown; @Override public void onModuleLoad() { GWT.setUncaughtExceptionHandler(new APVSUncaughtExceptionHandler()); Build build = GWT.create(Build.class); log.info("Starting APVS Version: " + build.version() + " - " + build.build()); clientFactory = GWT.create(ClientFactory.class); String pwd = LocalStorage.getInstance() .get(LocalStorage.SUPERVISOR_PWD); if (pwd != null) { login(pwd); } else { prompt(); } } private void login(final String pwd) { clientFactory.getServerService().isReady(pwd, new AsyncCallback<Boolean>() { @Override public void onSuccess(Boolean supervisor) { clientFactory.setSupervisor(supervisor); log.info("Server ready, user is " + (supervisor ? "SUPERVISOR" : "OBSERVER")); LocalStorage.getInstance().put(LocalStorage.SUPERVISOR_PWD, supervisor ? pwd : null); start(); } @Override public void onFailure(Throwable caught) { Window.alert("Server not ready. 
reload webpage " + caught); } }); } private void prompt() { final PasswordDialog pwdDialog = new PasswordDialog(); pwdDialog.addDialogResultHandler(new DialogResultHandler() { @Override public void onDialogResult(DialogResultEvent event) { login(event.getResult()); } }); pwdDialog.setModal(true); pwdDialog.setGlassEnabled(true); pwdDialog.setPopupPositionAndShow(new PositionCallback() { @Override public void setPosition(int offsetWidth, int offsetHeight) { // center pwdDialog.setPopupPosition( (Window.getClientWidth() - offsetWidth) / 3, (Window.getClientHeight() - offsetHeight) / 3); } }); } private void start() { remoteEventBus = clientFactory.getRemoteEventBus(); placeController = clientFactory.getPlaceController(); settingsPersister = new SettingsPersister(remoteEventBus); // get first div element NodeList<Element> divs = Document.get().getElementsByTagName("div"); if (divs.getLength() == 0) { Window.alert("Please define a <div> element with the class set to your view you want to show."); return; } boolean layoutOnlyMode = Window.Location.getQueryString().indexOf( "layout=true") >= 0; if (layoutOnlyMode) { log.info("Running in layoutOnly mode"); return; } boolean newCode = false; for (int i = 0; i < divs.getLength(); i++) { Element element = divs.getItem(i); String id = element.getId(); if (id.equals("footer")) { Label supervisor = new Label( clientFactory.isSupervisor() ? "Supervisor" : "Observer"); supervisor.addStyleName("footer-left"); RootPanel.get(id).insert(supervisor, 0); continue; } String[] parts = id.split("\\(", 2); if (parts.length == 2) { String className = parts[0]; if ((parts[1].length() > 0) && !parts[1].endsWith(")")) { log.warn("Missing closing parenthesis on '" + id + "'"); parts[1] += ")"; } Arguments args = new Arguments( parts[1].length() > 0 ? 
parts[1].substring(0, parts[1].length() - 1) : null); log.info("Creating " + className + " with args (" + args + ")"); Module module = null; // FIXME handle generically if (id.startsWith("MeasurementView")) { module = new MeasurementView(); } else if (id.startsWith("MeasurementTable")) { module = new MeasurementTable(); } else if (id.startsWith("AlarmView")) { module = new AlarmView(); } else if (id.startsWith("AudioSummary")) { module = new AudioSummary(); } else if (id.startsWith("AudioView")) { module = new AudioView(); } else if (id.startsWith("AudioSupervisorSettingsView")) { module = new AudioSupervisorSettingsView(); } else if (id.startsWith("CameraTable")) { module = new CameraTable(); } else if (id.startsWith("CameraView")) { module = new CameraView(); } else if (id.startsWith("EventView")) { module = new EventView(); } else if (id.startsWith("GeneralInfoView")) { module = new GeneralInfoView(); } else if (id.startsWith("InterventionView")) { module = new InterventionView(); } else if (id.startsWith("PlaceView")) { module = new PlaceView(); } else if (id.startsWith("ProcedureControls")) { module = new ProcedureControls(); } else if (id.startsWith("ProcedureView")) { module = new ProcedureView(); } else if (id.startsWith("PtuSettingsView")) { module = new PtuSettingsView(); } else if (id.startsWith("PtuTabSelector")) { module = new PtuTabSelector(); } else if (id.startsWith("PtuView")) { module = new PtuView(); } else if (id.startsWith("ServerSettingsView")) { module = new ServerSettingsView(); } else if (id.startsWith("Tab")) { module = new Tab(); } else if (id.startsWith("TimeView")) { module = new TimeView(); } if (module != null) { boolean add = module .configure(element, clientFactory, args); if (add && module instanceof IsWidget) { RootPanel.get(id).add((IsWidget) module); } newCode = true; } } } // FIXME create tab buttons for each, select default one clientFactory.getEventBus("ptu").fireEvent( new SelectPtuEvent(defaultPtuId)); // Server ALIVE 
status RequestRemoteEvent.register(remoteEventBus, new RequestRemoteEvent.Handler() { @Override public void onRequestEvent(RequestRemoteEvent event) { String type = event.getRequestedClassName(); if (type.equals(ConnectionStatusChangedRemoteEvent.class .getName())) { ConnectionStatusChangedRemoteEvent.fire(remoteEventBus, ConnectionType.server, alive); } } }); Scheduler.get().scheduleFixedDelay(new RepeatingCommand() { @Override public boolean execute() { RequestBuilder request = PingServiceAsync.Util.getInstance().ping(new AsyncCallback<Void>() { @Override public void onSuccess(Void result) { + Window.alert("Sucess"); if (!alive.isTrue()) { alive = Ternary.True; ConnectionStatusChangedRemoteEvent.fire(remoteEventBus, ConnectionType.server, alive); } } @Override public void onFailure(Throwable caught) { + Window.alert("Failure"); if (alive.isTrue()) { alive = Ternary.False; ConnectionStatusChangedRemoteEvent.fire(remoteEventBus, ConnectionType.server, alive); } } }); request.setTimeoutMillis(10000); try { request.send(); } catch (RequestException e) { Window.alert("Error "+e); } + Window.alert("Sent"); return false; } }, 20000); if (newCode) return; startWorker(); return; } private void startWorker() { // MGWTColorScheme.setBaseColor("#56a60D"); // MGWTColorScheme.setFontColor("#eee"); // // MGWTStyle.setDefaultBundle((MGWTClientBundle) // GWT.create(MGWTStandardBundle.class)); // MGWTStyle.getDefaultClientBundle().getMainCss().ensureInjected(); ViewPort viewPort = new MGWTSettings.ViewPort(); viewPort.setTargetDensity(DENSITY.MEDIUM); viewPort.setUserScaleAble(false).setMinimumScale(1.0) .setMinimumScale(1.0).setMaximumScale(1.0); MGWTSettings settings = new MGWTSettings(); settings.setViewPort(viewPort); // settings.setIconUrl("logo.png"); // settings.setAddGlosToIcon(true); settings.setFullscreen(true); settings.setPreventScrolling(true); MGWT.applySettings(settings); final ClientFactory clientFactory = new APVSClientFactory(); // Start PlaceHistoryHandler with 
our PlaceHistoryMapper TabletPlaceHistoryMapper historyMapper = GWT .create(TabletPlaceHistoryMapper.class); if (MGWT.getOsDetection().isTablet()) { // very nasty workaround because GWT does not corretly support // @media StyleInjector.inject(AppBundle.INSTANCE.css().getText()); createTabletDisplay(clientFactory); } else { createTabletDisplay(clientFactory); // createPhoneDisplay(clientFactory); } TabletHistoryObserver historyObserver = new TabletHistoryObserver(); MGWTPlaceHistoryHandler historyHandler = new MGWTPlaceHistoryHandler( historyMapper, historyObserver); historyHandler.register(clientFactory.getPlaceController(), clientFactory.getRemoteEventBus(), new HomePlace()); historyHandler.handleCurrentHistory(); } /* * private void createPhoneDisplay(ClientFactory clientFactory) { * AnimatableDisplay display = GWT.create(AnimatableDisplay.class); * * PhoneActivityMapper appActivityMapper = new PhoneActivityMapper( * clientFactory); * * PhoneAnimationMapper appAnimationMapper = new PhoneAnimationMapper(); * * AnimatingActivityManager activityManager = new AnimatingActivityManager( * appActivityMapper, appAnimationMapper, clientFactory.getEventBus()); * * activityManager.setDisplay(display); * * RootPanel.get().add(display); * * } */ private void createTabletDisplay(ClientFactory clientFactory) { SimplePanel navContainer = new SimplePanel(); navContainer.getElement().setId("nav"); navContainer.getElement().addClassName("landscapeonly"); AnimatableDisplay navDisplay = GWT.create(AnimatableDisplay.class); final TabletPortraitOverlay tabletPortraitOverlay = new TabletPortraitOverlay(); new OrientationRegionHandler(navContainer, tabletPortraitOverlay, navDisplay); new MasterRegionHandler(clientFactory.getRemoteEventBus(), "nav", tabletPortraitOverlay); ActivityMapper navActivityMapper = new TabletMenuActivityMapper( clientFactory); AnimationMapper navAnimationMapper = new TabletMenuAnimationMapper(); AnimatingActivityManager navActivityManager = new 
AnimatingActivityManager( navActivityMapper, navAnimationMapper, clientFactory.getRemoteEventBus()); navActivityManager.setDisplay(navDisplay); RootPanel.get().add(navContainer); SimplePanel mainContainer = new SimplePanel(); mainContainer.getElement().setId("main"); AnimatableDisplay mainDisplay = GWT.create(AnimatableDisplay.class); TabletPanelActivityMapper tabletMainActivityMapper = new TabletPanelActivityMapper( clientFactory); AnimationMapper tabletMainAnimationMapper = new TabletPanelAnimationMapper(); AnimatingActivityManager mainActivityManager = new AnimatingActivityManager( tabletMainActivityMapper, tabletMainAnimationMapper, clientFactory.getRemoteEventBus()); mainActivityManager.setDisplay(mainDisplay); mainContainer.setWidget(mainDisplay); RootPanel.get().add(mainContainer); } }
false
true
private void start() { remoteEventBus = clientFactory.getRemoteEventBus(); placeController = clientFactory.getPlaceController(); settingsPersister = new SettingsPersister(remoteEventBus); // get first div element NodeList<Element> divs = Document.get().getElementsByTagName("div"); if (divs.getLength() == 0) { Window.alert("Please define a <div> element with the class set to your view you want to show."); return; } boolean layoutOnlyMode = Window.Location.getQueryString().indexOf( "layout=true") >= 0; if (layoutOnlyMode) { log.info("Running in layoutOnly mode"); return; } boolean newCode = false; for (int i = 0; i < divs.getLength(); i++) { Element element = divs.getItem(i); String id = element.getId(); if (id.equals("footer")) { Label supervisor = new Label( clientFactory.isSupervisor() ? "Supervisor" : "Observer"); supervisor.addStyleName("footer-left"); RootPanel.get(id).insert(supervisor, 0); continue; } String[] parts = id.split("\\(", 2); if (parts.length == 2) { String className = parts[0]; if ((parts[1].length() > 0) && !parts[1].endsWith(")")) { log.warn("Missing closing parenthesis on '" + id + "'"); parts[1] += ")"; } Arguments args = new Arguments( parts[1].length() > 0 ? 
parts[1].substring(0, parts[1].length() - 1) : null); log.info("Creating " + className + " with args (" + args + ")"); Module module = null; // FIXME handle generically if (id.startsWith("MeasurementView")) { module = new MeasurementView(); } else if (id.startsWith("MeasurementTable")) { module = new MeasurementTable(); } else if (id.startsWith("AlarmView")) { module = new AlarmView(); } else if (id.startsWith("AudioSummary")) { module = new AudioSummary(); } else if (id.startsWith("AudioView")) { module = new AudioView(); } else if (id.startsWith("AudioSupervisorSettingsView")) { module = new AudioSupervisorSettingsView(); } else if (id.startsWith("CameraTable")) { module = new CameraTable(); } else if (id.startsWith("CameraView")) { module = new CameraView(); } else if (id.startsWith("EventView")) { module = new EventView(); } else if (id.startsWith("GeneralInfoView")) { module = new GeneralInfoView(); } else if (id.startsWith("InterventionView")) { module = new InterventionView(); } else if (id.startsWith("PlaceView")) { module = new PlaceView(); } else if (id.startsWith("ProcedureControls")) { module = new ProcedureControls(); } else if (id.startsWith("ProcedureView")) { module = new ProcedureView(); } else if (id.startsWith("PtuSettingsView")) { module = new PtuSettingsView(); } else if (id.startsWith("PtuTabSelector")) { module = new PtuTabSelector(); } else if (id.startsWith("PtuView")) { module = new PtuView(); } else if (id.startsWith("ServerSettingsView")) { module = new ServerSettingsView(); } else if (id.startsWith("Tab")) { module = new Tab(); } else if (id.startsWith("TimeView")) { module = new TimeView(); } if (module != null) { boolean add = module .configure(element, clientFactory, args); if (add && module instanceof IsWidget) { RootPanel.get(id).add((IsWidget) module); } newCode = true; } } } // FIXME create tab buttons for each, select default one clientFactory.getEventBus("ptu").fireEvent( new SelectPtuEvent(defaultPtuId)); // Server ALIVE 
status RequestRemoteEvent.register(remoteEventBus, new RequestRemoteEvent.Handler() { @Override public void onRequestEvent(RequestRemoteEvent event) { String type = event.getRequestedClassName(); if (type.equals(ConnectionStatusChangedRemoteEvent.class .getName())) { ConnectionStatusChangedRemoteEvent.fire(remoteEventBus, ConnectionType.server, alive); } } }); Scheduler.get().scheduleFixedDelay(new RepeatingCommand() { @Override public boolean execute() { RequestBuilder request = PingServiceAsync.Util.getInstance().ping(new AsyncCallback<Void>() { @Override public void onSuccess(Void result) { if (!alive.isTrue()) { alive = Ternary.True; ConnectionStatusChangedRemoteEvent.fire(remoteEventBus, ConnectionType.server, alive); } } @Override public void onFailure(Throwable caught) { if (alive.isTrue()) { alive = Ternary.False; ConnectionStatusChangedRemoteEvent.fire(remoteEventBus, ConnectionType.server, alive); } } }); request.setTimeoutMillis(10000); try { request.send(); } catch (RequestException e) { Window.alert("Error "+e); } return false; } }, 20000); if (newCode) return; startWorker(); return; }
private void start() { remoteEventBus = clientFactory.getRemoteEventBus(); placeController = clientFactory.getPlaceController(); settingsPersister = new SettingsPersister(remoteEventBus); // get first div element NodeList<Element> divs = Document.get().getElementsByTagName("div"); if (divs.getLength() == 0) { Window.alert("Please define a <div> element with the class set to your view you want to show."); return; } boolean layoutOnlyMode = Window.Location.getQueryString().indexOf( "layout=true") >= 0; if (layoutOnlyMode) { log.info("Running in layoutOnly mode"); return; } boolean newCode = false; for (int i = 0; i < divs.getLength(); i++) { Element element = divs.getItem(i); String id = element.getId(); if (id.equals("footer")) { Label supervisor = new Label( clientFactory.isSupervisor() ? "Supervisor" : "Observer"); supervisor.addStyleName("footer-left"); RootPanel.get(id).insert(supervisor, 0); continue; } String[] parts = id.split("\\(", 2); if (parts.length == 2) { String className = parts[0]; if ((parts[1].length() > 0) && !parts[1].endsWith(")")) { log.warn("Missing closing parenthesis on '" + id + "'"); parts[1] += ")"; } Arguments args = new Arguments( parts[1].length() > 0 ? 
parts[1].substring(0, parts[1].length() - 1) : null); log.info("Creating " + className + " with args (" + args + ")"); Module module = null; // FIXME handle generically if (id.startsWith("MeasurementView")) { module = new MeasurementView(); } else if (id.startsWith("MeasurementTable")) { module = new MeasurementTable(); } else if (id.startsWith("AlarmView")) { module = new AlarmView(); } else if (id.startsWith("AudioSummary")) { module = new AudioSummary(); } else if (id.startsWith("AudioView")) { module = new AudioView(); } else if (id.startsWith("AudioSupervisorSettingsView")) { module = new AudioSupervisorSettingsView(); } else if (id.startsWith("CameraTable")) { module = new CameraTable(); } else if (id.startsWith("CameraView")) { module = new CameraView(); } else if (id.startsWith("EventView")) { module = new EventView(); } else if (id.startsWith("GeneralInfoView")) { module = new GeneralInfoView(); } else if (id.startsWith("InterventionView")) { module = new InterventionView(); } else if (id.startsWith("PlaceView")) { module = new PlaceView(); } else if (id.startsWith("ProcedureControls")) { module = new ProcedureControls(); } else if (id.startsWith("ProcedureView")) { module = new ProcedureView(); } else if (id.startsWith("PtuSettingsView")) { module = new PtuSettingsView(); } else if (id.startsWith("PtuTabSelector")) { module = new PtuTabSelector(); } else if (id.startsWith("PtuView")) { module = new PtuView(); } else if (id.startsWith("ServerSettingsView")) { module = new ServerSettingsView(); } else if (id.startsWith("Tab")) { module = new Tab(); } else if (id.startsWith("TimeView")) { module = new TimeView(); } if (module != null) { boolean add = module .configure(element, clientFactory, args); if (add && module instanceof IsWidget) { RootPanel.get(id).add((IsWidget) module); } newCode = true; } } } // FIXME create tab buttons for each, select default one clientFactory.getEventBus("ptu").fireEvent( new SelectPtuEvent(defaultPtuId)); // Server ALIVE 
status RequestRemoteEvent.register(remoteEventBus, new RequestRemoteEvent.Handler() { @Override public void onRequestEvent(RequestRemoteEvent event) { String type = event.getRequestedClassName(); if (type.equals(ConnectionStatusChangedRemoteEvent.class .getName())) { ConnectionStatusChangedRemoteEvent.fire(remoteEventBus, ConnectionType.server, alive); } } }); Scheduler.get().scheduleFixedDelay(new RepeatingCommand() { @Override public boolean execute() { RequestBuilder request = PingServiceAsync.Util.getInstance().ping(new AsyncCallback<Void>() { @Override public void onSuccess(Void result) { Window.alert("Sucess"); if (!alive.isTrue()) { alive = Ternary.True; ConnectionStatusChangedRemoteEvent.fire(remoteEventBus, ConnectionType.server, alive); } } @Override public void onFailure(Throwable caught) { Window.alert("Failure"); if (alive.isTrue()) { alive = Ternary.False; ConnectionStatusChangedRemoteEvent.fire(remoteEventBus, ConnectionType.server, alive); } } }); request.setTimeoutMillis(10000); try { request.send(); } catch (RequestException e) { Window.alert("Error "+e); } Window.alert("Sent"); return false; } }, 20000); if (newCode) return; startWorker(); return; }
diff --git a/netbeans-suite/core/src/org/jcae/netbeans/viewer3d/actions/SelectionRectangle.java b/netbeans-suite/core/src/org/jcae/netbeans/viewer3d/actions/SelectionRectangle.java index 59e2eaa1..7af99fdd 100644 --- a/netbeans-suite/core/src/org/jcae/netbeans/viewer3d/actions/SelectionRectangle.java +++ b/netbeans-suite/core/src/org/jcae/netbeans/viewer3d/actions/SelectionRectangle.java @@ -1,45 +1,45 @@ /* * Project Info: http://jcae.sourceforge.net * * This program is free software; you can redistribute it and/or modify it under * the terms of the GNU Lesser General Public License as published by the Free * Software Foundation; either version 2.1 of the License, or (at your option) * any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more * details. * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software Foundation, Inc., * 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. * * (C) Copyright 2008, by EADS France */ package org.jcae.netbeans.viewer3d.actions; import javax.swing.Action; import javax.swing.ImageIcon; import org.jcae.vtk.View; public class SelectionRectangle extends ViewAction { private static ImageIcon icon = new ImageIcon(SelectionRectangle.class.getResource("stock_crop.png")); /** * */ public SelectionRectangle() { - putValue(Action.NAME, "Selection rectangle"); - putValue(Action.SHORT_DESCRIPTION, "Selection rectangle"); + putValue(Action.NAME, "Rectangle selection"); + putValue(Action.SHORT_DESCRIPTION, "Rectangle selection"); putValue(Action.SMALL_ICON, icon); setIcon(icon); } public void actionPerformed(View view) { view.setMouseMode(View.MouseMode.RECTANGLE_SELECTION); } }
true
true
public SelectionRectangle() { putValue(Action.NAME, "Selection rectangle"); putValue(Action.SHORT_DESCRIPTION, "Selection rectangle"); putValue(Action.SMALL_ICON, icon); setIcon(icon); }
public SelectionRectangle() { putValue(Action.NAME, "Rectangle selection"); putValue(Action.SHORT_DESCRIPTION, "Rectangle selection"); putValue(Action.SMALL_ICON, icon); setIcon(icon); }
diff --git a/modules/core/src/main/java/com/github/wuic/engine/impl/embedded/CGImageAggregatorEngine.java b/modules/core/src/main/java/com/github/wuic/engine/impl/embedded/CGImageAggregatorEngine.java index a9ef3b7..8f54bef 100644 --- a/modules/core/src/main/java/com/github/wuic/engine/impl/embedded/CGImageAggregatorEngine.java +++ b/modules/core/src/main/java/com/github/wuic/engine/impl/embedded/CGImageAggregatorEngine.java @@ -1,228 +1,228 @@ /* * "Copyright (c) 2013 Capgemini Technology Services (hereinafter "Capgemini") * * License/Terms of Use * Permission is hereby granted, free of charge and for the term of intellectual * property rights on the Software, to any person obtaining a copy of this software * and associated documentation files (the "Software"), to use, copy, modify and * propagate free of charge, anywhere in the world, all or part of the Software * subject to the following mandatory conditions: * * - The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * Any failure to comply with the above shall automatically terminate the license * and be construed as a breach of these Terms of Use causing significant harm to * Capgemini. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, PEACEFUL ENJOYMENT, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS * OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * * Except as contained in this notice, the name of Capgemini shall not be used in * advertising or otherwise to promote the use or other dealings in this Software * without prior written authorization from Capgemini. 
* * These Terms of Use are subject to French law. * * IMPORTANT NOTICE: The WUIC software implements software components governed by * open source software licenses (BSD and Apache) of which CAPGEMINI is not the * author or the editor. The rights granted on the said software components are * governed by the specific terms and conditions specified by Apache 2.0 and BSD * licenses." */ package com.github.wuic.engine.impl.embedded; import com.github.wuic.exception.WuicException; import com.github.wuic.exception.wrapper.BadClassException; import com.github.wuic.exception.wrapper.StreamException; import com.github.wuic.exception.xml.WuicXmlReadException; import com.github.wuic.resource.impl.ByteArrayWuicResource; import com.github.wuic.FileType; import com.github.wuic.resource.WuicResource; import com.github.wuic.configuration.Configuration; import com.github.wuic.configuration.ImageConfiguration; import com.github.wuic.engine.EngineRequest; import com.github.wuic.engine.PackerEngine; import com.github.wuic.engine.Region; import com.github.wuic.util.IOUtils; import java.awt.Dimension; import java.awt.Graphics; import java.awt.Image; import java.awt.Toolkit; import java.awt.image.BufferedImage; import java.awt.image.FilteredImageSource; import java.awt.image.ImageFilter; import java.awt.image.ImageProducer; import java.awt.image.RGBImageFilter; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Map.Entry; import javax.imageio.ImageIO; /** * <p> * This engine is in charge to merge images into one final image. * </p> * * @author Guillaume DROUET * @version 1.4 * @since 0.2.0 */ public class CGImageAggregatorEngine extends PackerEngine { /** * The path name when images are aggregated. */ public static final String AGGREGATION_NAME = "aggregate.png"; /** * The configuration. 
*/ private ImageConfiguration configuration; /** * <p> * Builds a new aggregator engine. * </p> * * @param config the configuration * @throws com.github.wuic.exception.xml.WuicXmlReadException if a bad configuration is detected */ public CGImageAggregatorEngine(final Configuration config) throws WuicXmlReadException { if (config instanceof ImageConfiguration) { configuration = (ImageConfiguration) config; setDimensionPacker(configuration.createDimensionPacker()); } else { throw new BadClassException(config, ImageConfiguration.class); } } /** * {@inheritDoc} */ @Override public List<WuicResource> parse(final EngineRequest request) throws WuicException { /* * Do nothing if the configuration says that no aggregation should be done */ - if (!works() || request.getResources().size() <= 1) { + if (!works()) { return request.getResources(); } else { final Map<Region, WuicResource> packed = pack(request.getResources()); // Initializing the final image final Dimension finalDim = getDimensionPack(); final BufferedImage transparentImage = makeTransparentImage((int) finalDim.getWidth(), (int) finalDim.getHeight()); // Merge each image into the final image for (Entry<Region, WuicResource> entry : packed.entrySet()) { InputStream is = null; try { is = entry.getValue().openStream(); final BufferedImage buff = ImageIO.read(is); final Region r = entry.getKey(); transparentImage.createGraphics().drawImage(buff, r.getxPosition(), r.getyPosition(), null); } catch (IOException ioe) { throw new StreamException(ioe); } finally { IOUtils.close(is); } } // Write the generated image as a WUIC resource to return it final ByteArrayOutputStream bos = new ByteArrayOutputStream(); try { ImageIO.write(transparentImage, "png", bos); } catch (IOException ioe) { throw new StreamException(ioe); } final WuicResource res = new ByteArrayWuicResource(bos.toByteArray(), AGGREGATION_NAME, FileType.PNG); return Arrays.asList(res); } } /** * <p> * Makes a transparent image of the given dimensions. 
* </p> * * @param width the image width * @param height the image height * @return transparent image */ public static BufferedImage makeTransparentImage(final int width, final int height) { // Create an image with the given dimension final BufferedImage img = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB); // Now filter the image to make all pixels transparent final ImageFilter filter = new TransparentImageFilter(); final ImageProducer ip = new FilteredImageSource(img.getSource(), filter); final Image image = Toolkit.getDefaultToolkit().createImage(ip); // Write the resulting image in the buffered image to return final BufferedImage bufferedImage = new BufferedImage(width, height, img.getType()); final Graphics graphics = bufferedImage.createGraphics(); graphics.drawImage(image, 0, 0, null); graphics.dispose(); return bufferedImage; } /** * <p> * This filter helps make an image transparent. * </p> * * @author Guillaume DROUET * @version 1.1 * @since 0.2.0 */ private static class TransparentImageFilter extends RGBImageFilter { /** * Filter value. */ private static final int FILTER_OFFSET = 0x00FFFFFF; /** * {@inheritDoc} */ @Override public final int filterRGB(final int x, final int y, final int rgb) { return FILTER_OFFSET & rgb; } } /** * {@inheritDoc} */ @Override public Configuration getConfiguration() { return configuration; } /** * {@inheritDoc} */ @Override public Boolean works() { return configuration.aggregate(); } }
true
true
public List<WuicResource> parse(final EngineRequest request) throws WuicException { /* * Do nothing if the configuration says that no aggregation should be done */ if (!works() || request.getResources().size() <= 1) { return request.getResources(); } else { final Map<Region, WuicResource> packed = pack(request.getResources()); // Initializing the final image final Dimension finalDim = getDimensionPack(); final BufferedImage transparentImage = makeTransparentImage((int) finalDim.getWidth(), (int) finalDim.getHeight()); // Merge each image into the final image for (Entry<Region, WuicResource> entry : packed.entrySet()) { InputStream is = null; try { is = entry.getValue().openStream(); final BufferedImage buff = ImageIO.read(is); final Region r = entry.getKey(); transparentImage.createGraphics().drawImage(buff, r.getxPosition(), r.getyPosition(), null); } catch (IOException ioe) { throw new StreamException(ioe); } finally { IOUtils.close(is); } } // Write the generated image as a WUIC resource to return it final ByteArrayOutputStream bos = new ByteArrayOutputStream(); try { ImageIO.write(transparentImage, "png", bos); } catch (IOException ioe) { throw new StreamException(ioe); } final WuicResource res = new ByteArrayWuicResource(bos.toByteArray(), AGGREGATION_NAME, FileType.PNG); return Arrays.asList(res); } }
public List<WuicResource> parse(final EngineRequest request) throws WuicException { /* * Do nothing if the configuration says that no aggregation should be done */ if (!works()) { return request.getResources(); } else { final Map<Region, WuicResource> packed = pack(request.getResources()); // Initializing the final image final Dimension finalDim = getDimensionPack(); final BufferedImage transparentImage = makeTransparentImage((int) finalDim.getWidth(), (int) finalDim.getHeight()); // Merge each image into the final image for (Entry<Region, WuicResource> entry : packed.entrySet()) { InputStream is = null; try { is = entry.getValue().openStream(); final BufferedImage buff = ImageIO.read(is); final Region r = entry.getKey(); transparentImage.createGraphics().drawImage(buff, r.getxPosition(), r.getyPosition(), null); } catch (IOException ioe) { throw new StreamException(ioe); } finally { IOUtils.close(is); } } // Write the generated image as a WUIC resource to return it final ByteArrayOutputStream bos = new ByteArrayOutputStream(); try { ImageIO.write(transparentImage, "png", bos); } catch (IOException ioe) { throw new StreamException(ioe); } final WuicResource res = new ByteArrayWuicResource(bos.toByteArray(), AGGREGATION_NAME, FileType.PNG); return Arrays.asList(res); } }
diff --git a/structures/memorials/ComponentGSMemorial.java b/structures/memorials/ComponentGSMemorial.java index feb42a9..5b24816 100644 --- a/structures/memorials/ComponentGSMemorial.java +++ b/structures/memorials/ComponentGSMemorial.java @@ -1,68 +1,68 @@ package GraveStone.structures.memorials; import java.util.Random; import GraveStone.block.BlockGSMemorial; import GraveStone.structures.BoundingBoxHelper; import GraveStone.structures.ComponentGraveStone; import GraveStone.structures.MemorialGenerationHelper; import net.minecraft.block.Block; import net.minecraft.world.World; import net.minecraft.world.biome.BiomeGenBase; /** * GraveStone mod * * @author NightKosh * @license Lesser GNU Public License v3 (http://www.gnu.org/licenses/lgpl.html) */ public class ComponentGSMemorial extends ComponentGraveStone { public static final int X_LENGTH = 3; public static final int HEIGHT = 7; public static final int Z_LENGTH = 3; public ComponentGSMemorial(int direction, Random random, int x, int z) { super(direction); boundingBox = BoundingBoxHelper.getCorrectBox(direction, x + (16 - X_LENGTH) / 2, 64, z + (16 - Z_LENGTH) / 2, X_LENGTH, HEIGHT, Z_LENGTH, 0); } /** * Build component */ @Override public boolean addComponentParts(World world, Random random) { int averageGroundLevel = BoundingBoxHelper.getAverageGroundLevel(world, boundingBox); if (averageGroundLevel < 0) { return true; } this.boundingBox.offset(0, averageGroundLevel - boundingBox.maxY + HEIGHT - 1, 0); int groundID; BiomeGenBase biom = world.getBiomeGenForCoords(getXWithOffset(0, 0), getZWithOffset(0, 0)); - if (biom.equals(BiomeGenBase.desert) || biom.equals(BiomeGenBase.desertHills)) { + if (biom.biomeID == BiomeGenBase.desert.biomeID || biom.biomeID == BiomeGenBase.desertHills.biomeID || biom.biomeID == BiomeGenBase.beach.biomeID) { groundID = Block.sand.blockID; } else { groundID = Block.grass.blockID; } this.fillWithAir(world, boundingBox, 0, 0, 2, 0, 6, 2); this.fillWithBlocks(world, boundingBox, 0, 0, 
0, 2, 0, 2, groundID, Block.grass.blockID, false); byte memorialType = BlockGSMemorial.getMemorialType(random, 0); MemorialGenerationHelper.placeMemorial(this, world, random, 1, 1, 1, BlockGSMemorial.getMetaDirection(coordBaseMode), memorialType); for (int x = 0; x < 3; x++) { for (int z = 0; z < 3; z++) { this.fillCurrentPositionBlocksDownwards(world, groundID, 0, x, -1, z, boundingBox); } } for (int x = 0; x < 3; x++) { for (int z = 0; z < 3; z++) { this.clearCurrentPositionBlocksUpwards(world, x, HEIGHT, z, boundingBox); } } return true; } }
true
true
public boolean addComponentParts(World world, Random random) { int averageGroundLevel = BoundingBoxHelper.getAverageGroundLevel(world, boundingBox); if (averageGroundLevel < 0) { return true; } this.boundingBox.offset(0, averageGroundLevel - boundingBox.maxY + HEIGHT - 1, 0); int groundID; BiomeGenBase biom = world.getBiomeGenForCoords(getXWithOffset(0, 0), getZWithOffset(0, 0)); if (biom.equals(BiomeGenBase.desert) || biom.equals(BiomeGenBase.desertHills)) { groundID = Block.sand.blockID; } else { groundID = Block.grass.blockID; } this.fillWithAir(world, boundingBox, 0, 0, 2, 0, 6, 2); this.fillWithBlocks(world, boundingBox, 0, 0, 0, 2, 0, 2, groundID, Block.grass.blockID, false); byte memorialType = BlockGSMemorial.getMemorialType(random, 0); MemorialGenerationHelper.placeMemorial(this, world, random, 1, 1, 1, BlockGSMemorial.getMetaDirection(coordBaseMode), memorialType); for (int x = 0; x < 3; x++) { for (int z = 0; z < 3; z++) { this.fillCurrentPositionBlocksDownwards(world, groundID, 0, x, -1, z, boundingBox); } } for (int x = 0; x < 3; x++) { for (int z = 0; z < 3; z++) { this.clearCurrentPositionBlocksUpwards(world, x, HEIGHT, z, boundingBox); } } return true; }
public boolean addComponentParts(World world, Random random) { int averageGroundLevel = BoundingBoxHelper.getAverageGroundLevel(world, boundingBox); if (averageGroundLevel < 0) { return true; } this.boundingBox.offset(0, averageGroundLevel - boundingBox.maxY + HEIGHT - 1, 0); int groundID; BiomeGenBase biom = world.getBiomeGenForCoords(getXWithOffset(0, 0), getZWithOffset(0, 0)); if (biom.biomeID == BiomeGenBase.desert.biomeID || biom.biomeID == BiomeGenBase.desertHills.biomeID || biom.biomeID == BiomeGenBase.beach.biomeID) { groundID = Block.sand.blockID; } else { groundID = Block.grass.blockID; } this.fillWithAir(world, boundingBox, 0, 0, 2, 0, 6, 2); this.fillWithBlocks(world, boundingBox, 0, 0, 0, 2, 0, 2, groundID, Block.grass.blockID, false); byte memorialType = BlockGSMemorial.getMemorialType(random, 0); MemorialGenerationHelper.placeMemorial(this, world, random, 1, 1, 1, BlockGSMemorial.getMetaDirection(coordBaseMode), memorialType); for (int x = 0; x < 3; x++) { for (int z = 0; z < 3; z++) { this.fillCurrentPositionBlocksDownwards(world, groundID, 0, x, -1, z, boundingBox); } } for (int x = 0; x < 3; x++) { for (int z = 0; z < 3; z++) { this.clearCurrentPositionBlocksUpwards(world, x, HEIGHT, z, boundingBox); } } return true; }
diff --git a/unsupervised/jally/app/reports/TeamReport.java b/unsupervised/jally/app/reports/TeamReport.java index bd19da1..9a445bc 100644 --- a/unsupervised/jally/app/reports/TeamReport.java +++ b/unsupervised/jally/app/reports/TeamReport.java @@ -1,63 +1,65 @@ package reports; import java.util.List; import models.Iteration; /** * object model representing a burndown report- consumable by generators * * @author dlange * */ public class TeamReport extends Report { public String project; public int avgVelocity; public int avgPrecision; public int avgStoryPts; public List<Iteration> iterations; public BurndownReport burndown; public TeamReport(Iteration iteration) { this.project = iteration.team.name; this.avgVelocity = iteration.completedPoints; this.avgPrecision = calcPrecision(iteration.completedPoints, iteration.totalPoints); this.avgStoryPts = 0; // go back last 5 sprints this.iterations = teamIterations(iteration.team.id, 5); this.burndown = new BurndownReport(iteration); } /** * Answer back last n iterations * @param team * @return */ private List<Iteration> teamIterations(Long teamId, int n) { int totalVelocity = 0; int totalPlanned = 0; List<Iteration> iterations = Iteration.getLastNTeamIterations(n, teamId); - if (iterations.size() > 0) { + if (iterations.size() > 1) { + // pop off last one + iterations.remove(iterations.size()-1); for (Iteration iteration : iterations) { totalVelocity += iteration.completedPoints; totalPlanned += iteration.totalPoints; } this.avgVelocity = (int) (totalVelocity / iterations.size()); this.avgPrecision = (int) ((totalVelocity * 100) / totalPlanned); } return iterations; } private int calcPrecision(int completed, int total) { // TODO needs to be real return (int) ((total > 0)? ((completed * 100.0)/total):0); } public Report generate() { htmlReport = views.html.teamreport.render(this).body(); generated = true; return this; } }
true
true
private List<Iteration> teamIterations(Long teamId, int n) { int totalVelocity = 0; int totalPlanned = 0; List<Iteration> iterations = Iteration.getLastNTeamIterations(n, teamId); if (iterations.size() > 0) { for (Iteration iteration : iterations) { totalVelocity += iteration.completedPoints; totalPlanned += iteration.totalPoints; } this.avgVelocity = (int) (totalVelocity / iterations.size()); this.avgPrecision = (int) ((totalVelocity * 100) / totalPlanned); } return iterations; }
private List<Iteration> teamIterations(Long teamId, int n) { int totalVelocity = 0; int totalPlanned = 0; List<Iteration> iterations = Iteration.getLastNTeamIterations(n, teamId); if (iterations.size() > 1) { // pop off last one iterations.remove(iterations.size()-1); for (Iteration iteration : iterations) { totalVelocity += iteration.completedPoints; totalPlanned += iteration.totalPoints; } this.avgVelocity = (int) (totalVelocity / iterations.size()); this.avgPrecision = (int) ((totalVelocity * 100) / totalPlanned); } return iterations; }
diff --git a/src/test/java/org/atlasapi/application/v3/ApplicationConfigurationTranslatorTest.java b/src/test/java/org/atlasapi/application/v3/ApplicationConfigurationTranslatorTest.java index 14c29ef..7bab816 100644 --- a/src/test/java/org/atlasapi/application/v3/ApplicationConfigurationTranslatorTest.java +++ b/src/test/java/org/atlasapi/application/v3/ApplicationConfigurationTranslatorTest.java @@ -1,42 +1,43 @@ package org.atlasapi.application.v3; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import org.atlasapi.application.v3.ApplicationConfiguration; import org.atlasapi.application.v3.ApplicationConfigurationTranslator; import org.atlasapi.media.entity.Publisher; import org.junit.Test; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.mongodb.DBObject; public class ApplicationConfigurationTranslatorTest { private final ApplicationConfigurationTranslator codec = new ApplicationConfigurationTranslator(); @Test public void testEncodesAndDecodesApplicationConfiguration() { ApplicationConfiguration config = ApplicationConfiguration.defaultConfiguration() - .request(Publisher.PA) - .approve(Publisher.PA) - .enable(Publisher.PA) - .copyWithPrecedence(ImmutableList.of(Publisher.PA, Publisher.BBC)) + .enable(Publisher.BBC) + .request(Publisher.ITV) + .approve(Publisher.ITV) + .enable(Publisher.ITV) + .copyWithPrecedence(ImmutableList.of(Publisher.ITV, Publisher.BBC)) .copyWithWritableSources(ImmutableSet.of(Publisher.ITV)); DBObject dbo = codec.toDBObject(config); ApplicationConfiguration decoded = codec.fromDBObject(dbo); - assertTrue(decoded.isEnabled(Publisher.PA)); + assertTrue(decoded.isEnabled(Publisher.ITV)); assertTrue(decoded.isEnabled(Publisher.BBC)); - assertThat(decoded.orderdPublishers().get(0), is(Publisher.PA)); + assertThat(decoded.orderdPublishers().get(0), is(Publisher.ITV)); 
assertThat(decoded.orderdPublishers().get(1), is(Publisher.BBC)); assertTrue(decoded.canWrite(Publisher.ITV)); } }
false
true
public void testEncodesAndDecodesApplicationConfiguration() { ApplicationConfiguration config = ApplicationConfiguration.defaultConfiguration() .request(Publisher.PA) .approve(Publisher.PA) .enable(Publisher.PA) .copyWithPrecedence(ImmutableList.of(Publisher.PA, Publisher.BBC)) .copyWithWritableSources(ImmutableSet.of(Publisher.ITV)); DBObject dbo = codec.toDBObject(config); ApplicationConfiguration decoded = codec.fromDBObject(dbo); assertTrue(decoded.isEnabled(Publisher.PA)); assertTrue(decoded.isEnabled(Publisher.BBC)); assertThat(decoded.orderdPublishers().get(0), is(Publisher.PA)); assertThat(decoded.orderdPublishers().get(1), is(Publisher.BBC)); assertTrue(decoded.canWrite(Publisher.ITV)); }
public void testEncodesAndDecodesApplicationConfiguration() { ApplicationConfiguration config = ApplicationConfiguration.defaultConfiguration() .enable(Publisher.BBC) .request(Publisher.ITV) .approve(Publisher.ITV) .enable(Publisher.ITV) .copyWithPrecedence(ImmutableList.of(Publisher.ITV, Publisher.BBC)) .copyWithWritableSources(ImmutableSet.of(Publisher.ITV)); DBObject dbo = codec.toDBObject(config); ApplicationConfiguration decoded = codec.fromDBObject(dbo); assertTrue(decoded.isEnabled(Publisher.ITV)); assertTrue(decoded.isEnabled(Publisher.BBC)); assertThat(decoded.orderdPublishers().get(0), is(Publisher.ITV)); assertThat(decoded.orderdPublishers().get(1), is(Publisher.BBC)); assertTrue(decoded.canWrite(Publisher.ITV)); }
diff --git a/src/me/confuserr/banmanager/data/MuteData.java b/src/me/confuserr/banmanager/data/MuteData.java index 87232a0..f4b04d7 100644 --- a/src/me/confuserr/banmanager/data/MuteData.java +++ b/src/me/confuserr/banmanager/data/MuteData.java @@ -1,37 +1,38 @@ package me.confuserr.banmanager.data; public class MuteData { private String muted; private long expires; private String reason; private long time; private String by; public MuteData(String dmuted, long dexpires, String dreason, long dtime, String dby) { muted = dmuted; expires = dexpires; reason = dreason; by = dby; + time = dtime; } public String getMuted() { return muted; } public long getExpires() { return expires; } public String getReason() { return reason; } public long getTime() { return time; } public String getBy() { return by; } }
true
true
public MuteData(String dmuted, long dexpires, String dreason, long dtime, String dby) { muted = dmuted; expires = dexpires; reason = dreason; by = dby; }
public MuteData(String dmuted, long dexpires, String dreason, long dtime, String dby) { muted = dmuted; expires = dexpires; reason = dreason; by = dby; time = dtime; }
diff --git a/src/main/java/org/javadrop/packaging/impl/BasePackagerStrategy.java b/src/main/java/org/javadrop/packaging/impl/BasePackagerStrategy.java index 09a1f2d..4a64966 100644 --- a/src/main/java/org/javadrop/packaging/impl/BasePackagerStrategy.java +++ b/src/main/java/org/javadrop/packaging/impl/BasePackagerStrategy.java @@ -1,134 +1,139 @@ /******************************************************************************* * Copyright 2011 iovation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package org.javadrop.packaging.impl; import java.io.File; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.logging.Log; import org.javadrop.TemplateProcessor; import org.javadrop.packaging.PackagerStrategy; import org.javadrop.runner.RunnerStrategy; /** * This goal combines a 'runner strategy' and a 'packager strategy' together to produce * an artifact that can be installed. * * This plugin is intended to keep the configuration in a pom to a minimum. The various * strategies could get fairly complex in their processing. * */ public abstract class BasePackagerStrategy implements PackagerStrategy { /** * Variables that are used by the packager template(s) to create the final scripts that are needed * for the final packager solution. 
*/ protected Map<String, String> packagerVariables; /** * Maven mojo log */ private Log _log; private void applyDefaults() { packagerVariables = new HashMap<String, String>(); packagerVariables.put("PKG_NAME", "service"); packagerVariables.put("PKG_INSTALL_LOC","/usr/local/javadrop/service"); packagerVariables.put("PKG_USER", "javadrop"); packagerVariables.put("PKG_GROUP", "javadrop"); packagerVariables.put("PKG_USERID", "55"); packagerVariables.put("PKG_GROUPID", "700"); packagerVariables.put("RUNNER_NAME", "service"); packagerVariables.put("RUNNER_INSTALL_LOC","/usr/local/javadrop/service"); packagerVariables.put("RUNNER_USER", "javadrop"); packagerVariables.put("RUNNER_GROUP", "javadrop"); } /** * Check the values supplied in the pom to see if they are ok. * @throws MojoExecutionException */ @Override public void applyParameters(Map<String, String> paramMap)// throws MojoExecutionException { applyDefaults(); for (Map.Entry<String,String> entry : paramMap.entrySet()) { packagerVariables.put(entry.getKey(), entry.getValue()); } } @Override public void processTemplates(RunnerStrategy runner, TemplateProcessor processor, File workingDirectory) throws MojoExecutionException { // Go through and create all the destination locations. 
if (!workingDirectory.exists()) { workingDirectory.mkdirs(); } Map<File, File> conversionFiles = runner.getConversionFiles(workingDirectory); Map<String, String> templateParameters = new HashMap<String,String>(); templateParameters.putAll(packagerVariables); templateParameters.putAll(runner.getParameters()); conversionFiles.putAll(getConversionFiles(workingDirectory, runner)); for (Entry<File, File> cfile : conversionFiles.entrySet()) { - processor.applyVTemplate(cfile.getKey(), cfile.getValue(), templateParameters); + // TODO - Quick and dirty fix to support copying binaries around + if (cfile.getKey().getName().toLowerCase().endsWith("jar")) { + cfile.getKey().renameTo(cfile.getValue()); + } else { + processor.applyVTemplate(cfile.getKey(), cfile.getValue(), templateParameters); + } } } /** * Returns the install location * * @return Where the package is to be installed... As a string. */ protected String getInstallLoc() { return packagerVariables.get("PKG_INSTALL_LOC"); } public void set_log(Log _log) { this._log = _log; } public Log get_log() { return _log; } protected String getGroup() { return packagerVariables.get("PKG_GROUP"); } protected int getGid() { int gid = Integer.parseInt(packagerVariables.get("PKG_GROUPID")); return gid; } protected String getUser() { return packagerVariables.get("PKG_USER"); } protected int getUid() { int userid = Integer.parseInt(packagerVariables.get("PKG_USERID")); return userid; } }
true
true
public void processTemplates(RunnerStrategy runner, TemplateProcessor processor, File workingDirectory) throws MojoExecutionException { // Go through and create all the destination locations. if (!workingDirectory.exists()) { workingDirectory.mkdirs(); } Map<File, File> conversionFiles = runner.getConversionFiles(workingDirectory); Map<String, String> templateParameters = new HashMap<String,String>(); templateParameters.putAll(packagerVariables); templateParameters.putAll(runner.getParameters()); conversionFiles.putAll(getConversionFiles(workingDirectory, runner)); for (Entry<File, File> cfile : conversionFiles.entrySet()) { processor.applyVTemplate(cfile.getKey(), cfile.getValue(), templateParameters); } }
public void processTemplates(RunnerStrategy runner, TemplateProcessor processor, File workingDirectory) throws MojoExecutionException { // Go through and create all the destination locations. if (!workingDirectory.exists()) { workingDirectory.mkdirs(); } Map<File, File> conversionFiles = runner.getConversionFiles(workingDirectory); Map<String, String> templateParameters = new HashMap<String,String>(); templateParameters.putAll(packagerVariables); templateParameters.putAll(runner.getParameters()); conversionFiles.putAll(getConversionFiles(workingDirectory, runner)); for (Entry<File, File> cfile : conversionFiles.entrySet()) { // TODO - Quick and dirty fix to support copying binaries around if (cfile.getKey().getName().toLowerCase().endsWith("jar")) { cfile.getKey().renameTo(cfile.getValue()); } else { processor.applyVTemplate(cfile.getKey(), cfile.getValue(), templateParameters); } } }
diff --git a/src/edu/rpi/cmt/access/Acl.java b/src/edu/rpi/cmt/access/Acl.java index d994ece..4615c21 100644 --- a/src/edu/rpi/cmt/access/Acl.java +++ b/src/edu/rpi/cmt/access/Acl.java @@ -1,619 +1,621 @@ /* ********************************************************************** Copyright 2006 Rensselaer Polytechnic Institute. All worldwide rights reserved. Redistribution and use of this distribution in source and binary forms, with or without modification, are permitted provided that: The above copyright notice and this permission notice appear in all copies and supporting documentation; The name, identifiers, and trademarks of Rensselaer Polytechnic Institute are not used in advertising or publicity without the express prior written permission of Rensselaer Polytechnic Institute; DISCLAIMER: The software is distributed" AS IS" without any express or implied warranty, including but not limited to, any implied warranties of merchantability or fitness for a particular purpose or any warrant)' of non-infringement of any current or pending patent rights. The authors of the software make no representations about the suitability of this software for any particular purpose. The entire risk as to the quality and performance of the software is with the user. Should the software prove defective, the user assumes the cost of all necessary servicing, repair or correction. In particular, neither Rensselaer Polytechnic Institute, nor the authors of the software are liable for any indirect, special, consequential, or incidental damages related to the software, to the maximum extent the law permits. */ package edu.rpi.cmt.access; import java.io.Serializable; import java.util.Collection; import java.util.TreeMap; /** Object to represent an acl for a calendar entity or service. We should * have one of these per session - or perhaps thread - and lock it during * processing. * * <p>The objects represented by Privileges will assume transient states * during processing. 
* * <p>An ACL is a set of ACEs which are stored as an encoded character * array. These aces should be sorted to facilitate merging and to * allow us to possibly only process as much of the acl as is necessary. * * <p>For example, owner access should come first, it's first in the test and * we can avoid decoding an ace which doesn't include any owner access. * * <p>The whoTypexxx declarations in Ace define the order of Ace types. In * addition, any aces that contain names should be in ascending alphabetic * order. * * <p>In the list of Ace there can only be one entry per AceWho so we can * represent the list as a SortedMap. Replacement then becomes easy. * * @author Mike Douglass douglm - rpi.edu */ public class Acl extends EncodedAcl implements PrivilegeDefs { boolean debug; private TreeMap<AceWho, Ace> aces; /** Used while evaluating access */ /** Constructor * */ public Acl() { this(false); } /** Constructor * * @param debug */ public Acl(boolean debug) { this.debug = debug; } /** Turn debugging on/off * * @param val */ public void setDebug(boolean val) { debug = val; } /** Remove all ace entries * */ public void clear() { aces = null; } /** Result of evaluating access to an object for a principal */ public static class CurrentAccess implements Serializable { /** The Acl used to evaluate the access. We should not necessarily * make this available to the client. 
*/ public Acl acl; /** Allowed access for each privilege type * @see PrivilegeDefs */ public PrivilegeSet privileges = null; /** Privileges desired */ public Privilege[] desiredAccess; /** Was it succesful */ public boolean accessAllowed; public String toString() { StringBuffer sb = new StringBuffer("CurrentAccess{"); sb.append("acl="); sb.append(acl); sb.append("accessAllowed="); sb.append(accessAllowed); sb.append("}"); return sb.toString(); } } /** Evaluating an ACL * * <p>The process of evaluating access is as follows: * * <p>For an unauthenticated (guest) user we look for an entry with an * unauthenticated 'who' field. If none exists access is denied othewise the * indicated privileges are used to determine access. * * <p>If the principal is authenticated there are a number of steps in the process * which are executed in the following order: * * <ol> * <li>If the principal is the owner then use the given access or the default.</li> * * <li>If there are specific ACEs for the user use the merged access. </li> * * <li>Find all group entries for the given user's groups. If there is more than * one combine them with the more permissive taking precedence, e.g * write allowed overrides write denied * <p>If any group entries were found we're done.</li> * * <li>if there is an 'other' entry (i.e. 
not Owner) use that.</li> * * <li>if there is an authenticated entry use that.</li> * * <li>Otherwise apply defaults - for the owner full acccess, for any others no * access</li> * * @param who * @param owner * @param how * @param acl * @param filter if not null specifies maximum access * @return CurrentAccess access + allowed/disallowed * @throws AccessException */ public CurrentAccess evaluateAccess(AccessPrincipal who, String owner, Privilege[] how, char[] acl, PrivilegeSet filter) throws AccessException { boolean authenticated = !who.getUnauthenticated(); boolean isOwner = false; CurrentAccess ca = new CurrentAccess(); ca.desiredAccess = how; ca.acl = this; decode(acl); if (authenticated) { isOwner = who.getAccount().equals(owner); } StringBuffer debugsb = null; if (debug) { debugsb = new StringBuffer("Check access for '"); debugsb.append(new String(acl)); debugsb.append("' with authenticated = "); debugsb.append(authenticated); debugsb.append(" isOwner = "); debugsb.append(isOwner); } getPrivileges: { if (!authenticated) { ca.privileges = Ace.findMergedPrivilege(this, null, Ace.whoTypeUnauthenticated); if (ca.privileges == null) { // All might be available ca.privileges = Ace.findMergedPrivilege(this, null, Ace.whoTypeAll); } break getPrivileges; } if (isOwner) { ca.privileges = Ace.findMergedPrivilege(this, null, Ace.whoTypeOwner); if (ca.privileges == null) { ca.privileges = PrivilegeSet.makeDefaultOwnerPrivileges(); } if (debug) { debugsb.append("... For owner got: " + ca.privileges); } break getPrivileges; } // Not owner - look for user ca.privileges = Ace.findMergedPrivilege(this, who.getAccount(), Ace.whoTypeUser); if (ca.privileges != null) { if (debug) { debugsb.append("... 
For user got: " + ca.privileges); } break getPrivileges; } // No specific user access - look for group access if (who.getGroupNames() != null) { for (String group: who.getGroupNames()) { if (debug) { debugsb.append("...Try access for group " + group); } PrivilegeSet privs = Ace.findMergedPrivilege(this, group, Ace.whoTypeGroup); if (privs != null) { ca.privileges = PrivilegeSet.mergePrivileges(ca.privileges, privs, false); } } } if (ca.privileges != null) { if (debug) { debugsb.append("...For groups got: " + ca.privileges); } break getPrivileges; } // "other" access set? ca.privileges = Ace.findMergedPrivilege(this, null, Ace.whoTypeOther); if (ca.privileges == null) { // All might be available ca.privileges = Ace.findMergedPrivilege(this, null, Ace.whoTypeAll); } if (ca.privileges != null) { if (debug) { debugsb.append("...For other got: " + ca.privileges); } break getPrivileges; } } // getPrivileges if (ca.privileges == null) { if (debug) { debugMsg(debugsb.toString() + "...Check access denied (noprivs)"); } return ca; } ca.privileges.setUnspecified(isOwner); if (filter != null) { ca.privileges.filterPrivileges(filter); } if (how.length == 0) { // Means any access will do - debugMsg(debugsb.toString() + "...Check access allowed (any requested)"); + if (debug) { + debugMsg(debugsb.toString() + "...Check access allowed (any requested)"); + } ca.accessAllowed = ca.privileges.getAnyAllowed(); return ca; } for (int i = 0; i < how.length; i++) { char priv = ca.privileges.getPrivilege(how[i].getIndex()); if ((priv != allowed) && (priv != allowedInherited)) { if (debug) { debugsb.append("...Check access denied (!allowed) "); debugsb.append(ca.privileges); debugMsg(debugsb.toString()); } return ca; } } if (debug) { debugMsg(debugsb.toString() + "...Check access allowed"); } ca.accessAllowed = true; return ca; } /** Return the ace collection for previously decoded access * * @return Collection ace collection for previously decoded access * @throws AccessException */ public 
Collection<Ace> getAces() throws AccessException { return aces.values(); } /** Add an entry to the Acl * * @param val Ace to add */ public void addAce(Ace val) { if (aces == null) { aces = new TreeMap<AceWho, Ace>(); } aces.put(val.getWho(), val); } /** Set to default access * */ public void defaultAccess() { aces = null; // reset addAce(new Ace(null, false, Ace.whoTypeOwner, Privileges.makePriv(Privileges.privAll))); addAce(new Ace(null, false, Ace.whoTypeOther, Privileges.makePriv(Privileges.privNone))); } /** Remove access for a given 'who' entry * * @param who * @return boolean true if removed */ public boolean removeWho(AceWho who) { if (aces == null) { return false; } return aces.remove(who) != null; } /* ==================================================================== * Decoding methods * ==================================================================== */ /** Given an encoded acl convert to an ordered sequence of fully expanded * ace objects. * * @param val String val to decode * @throws AccessException */ public void decode(String val) throws AccessException { decode(val.toCharArray()); } /** Given an encoded acl convert to an ordered sequence of fully expanded * ace objects. * * @param val char[] val to decode * @throws AccessException */ public void decode(char[] val) throws AccessException { setEncoded(val); if (empty()) { defaultAccess(); } else { aces = new TreeMap<AceWho, Ace>(); while (hasMore()) { Ace ace = new Ace(); ace.decode(this, true); aces.put(ace.getWho(), ace); } } } /** Given an encoded acl merge it into this objects ace list. This process * should be carried out moving up from the end of the path to the root as * entries will only be added to the merged list if the notWho + whoType + who * do not match. * * <p>The inherited flag will be set on all merged Ace objects. 
* * <p>For example, if we have the path structure * <pre> * /user owner=sys,access=write-content owner * /jeb owner=jeb,access=write-content owner * /calendar owner=jeb no special access * /rocalendar owner=jeb read owner * </pre> * then, while evaluating the access for rocalendar we start at rocalendar * and move up the tree. The "read owner" access on rocalendar overrides any * access we find further up the tree, e.g. "write-content owner" * * <p>While evaluating the access for calendar we start at calendar * and move up the tree. There is no overriding access so the final access is * "write-content owner" inherited from /user/jeb * * <p>Also note the encoded value will not reflect the eventual Acl. * * <p>And what did that mean? I think I meant that we can derive the acl for * an entity from the merged result. * * @param val char[] val to decode and merge * @param path path of current entity * @throws AccessException */ public void merge(char[] val, String path) throws AccessException { EncodedAcl ea = new EncodedAcl(); ea.setEncoded(val); if (ea.empty()) { return; } while (ea.hasMore()) { Ace ace = new Ace(); ace.decode(ea, true); if (!ace.getInherited()) { ace.setInherited(true); ace.setInheritedFrom(path); } if (aces == null) { aces = new TreeMap<AceWho, Ace>(); } /* If we don't have this who yet then add it to the result. Otherwise the * who from lower down takes precedence. */ if (aces.get(ace.getWho()) == null) { aces.put(ace.getWho(), ace); } } } /* * Given a decoded acl merge it into this objects ace list. This process * should be carried out moving up from the end of the path to the root as * entries will only be added to the merged list if the notWho + whoType + who * do not match. * * <p>The inherited flag will be set on all merged Ace objects. * <p>XXX Note that reuse of Acls for merges invalidates the inherited flag. * I think it's only used for display and acl modification purposes so * shouldn't affect normal access control checks. 
* * <p>Also note the encoded value will not reflect the eventual Acl. * * @param val Acl to merge * @throws AccessException * / public void merge(Acl val) throws AccessException { Collection<Ace> valAces = val.getAces(); if (valAces == null) { return; } for (Ace ace: valAces) { ace.setInherited(true); if (!aces.contains(ace)) { aces.add(ace); } } }*/ /* ==================================================================== * Encoding methods * ==================================================================== */ /** Encode this object after manipulation or creation. Inherited entries * will be skipped. * * @return char[] encoded value * @throws AccessException */ public char[] encode() throws AccessException { startEncoding(); if (aces == null) { return null; } for (Ace ace: aces.values()) { if (!ace.getInherited()) { ace.encode(this); } } return getEncoding(); } /** Encode this object after manipulation or creation. Inherited entries * will be skipped. Returns null for no aces * * @return String encoded value or null * @throws AccessException */ public String encodeStr() throws AccessException { startEncoding(); char[] encoded = encode(); if (encoded == null) { return null; } return new String(encoded); } /** Encode this object after manipulation or creation. Inherited entries * will NOT be skipped. * * @return char[] encoded value * @throws AccessException */ public char[] encodeAll() throws AccessException { startEncoding(); if (aces == null) { return null; } for (Ace ace: aces.values()) { ace.encode(this); } return getEncoding(); } /* ==================================================================== * Object methods * ==================================================================== */ /** Provide a string representation for user display - this should * use a localized resource and be part of a display level. 
* * @return String representation */ public String toUserString() { StringBuffer sb = new StringBuffer(); try { decode(getEncoded()); for (Ace ace: aces.values()) { sb.append(ace.toString()); sb.append(" "); } } catch (Throwable t) { error(t); sb.append("Decode exception " + t.getMessage()); } return sb.toString(); } public String toString() { StringBuffer sb = new StringBuffer(); sb.append("Acl{"); if (!empty()) { sb.append("encoded=["); rewind(); while (hasMore()) { sb.append(getChar()); } sb.append("] "); rewind(); try { if (aces == null) { decode(getEncoded()); } for (Ace ace: aces.values()) { sb.append("\n"); sb.append(ace.toString()); } } catch (Throwable t) { error(t); sb.append("Decode exception " + t.getMessage()); } } sb.append("}"); return sb.toString(); } /** For testing * * @param args */ public static void main(String[] args) { try { Acl acl = new Acl(); acl.decode(args[0]); System.out.println(acl.toString()); } catch (Throwable t) { t.printStackTrace(); } } }
true
true
public CurrentAccess evaluateAccess(AccessPrincipal who, String owner, Privilege[] how, char[] acl, PrivilegeSet filter) throws AccessException { boolean authenticated = !who.getUnauthenticated(); boolean isOwner = false; CurrentAccess ca = new CurrentAccess(); ca.desiredAccess = how; ca.acl = this; decode(acl); if (authenticated) { isOwner = who.getAccount().equals(owner); } StringBuffer debugsb = null; if (debug) { debugsb = new StringBuffer("Check access for '"); debugsb.append(new String(acl)); debugsb.append("' with authenticated = "); debugsb.append(authenticated); debugsb.append(" isOwner = "); debugsb.append(isOwner); } getPrivileges: { if (!authenticated) { ca.privileges = Ace.findMergedPrivilege(this, null, Ace.whoTypeUnauthenticated); if (ca.privileges == null) { // All might be available ca.privileges = Ace.findMergedPrivilege(this, null, Ace.whoTypeAll); } break getPrivileges; } if (isOwner) { ca.privileges = Ace.findMergedPrivilege(this, null, Ace.whoTypeOwner); if (ca.privileges == null) { ca.privileges = PrivilegeSet.makeDefaultOwnerPrivileges(); } if (debug) { debugsb.append("... For owner got: " + ca.privileges); } break getPrivileges; } // Not owner - look for user ca.privileges = Ace.findMergedPrivilege(this, who.getAccount(), Ace.whoTypeUser); if (ca.privileges != null) { if (debug) { debugsb.append("... For user got: " + ca.privileges); } break getPrivileges; } // No specific user access - look for group access if (who.getGroupNames() != null) { for (String group: who.getGroupNames()) { if (debug) { debugsb.append("...Try access for group " + group); } PrivilegeSet privs = Ace.findMergedPrivilege(this, group, Ace.whoTypeGroup); if (privs != null) { ca.privileges = PrivilegeSet.mergePrivileges(ca.privileges, privs, false); } } } if (ca.privileges != null) { if (debug) { debugsb.append("...For groups got: " + ca.privileges); } break getPrivileges; } // "other" access set? 
ca.privileges = Ace.findMergedPrivilege(this, null, Ace.whoTypeOther); if (ca.privileges == null) { // All might be available ca.privileges = Ace.findMergedPrivilege(this, null, Ace.whoTypeAll); } if (ca.privileges != null) { if (debug) { debugsb.append("...For other got: " + ca.privileges); } break getPrivileges; } } // getPrivileges if (ca.privileges == null) { if (debug) { debugMsg(debugsb.toString() + "...Check access denied (noprivs)"); } return ca; } ca.privileges.setUnspecified(isOwner); if (filter != null) { ca.privileges.filterPrivileges(filter); } if (how.length == 0) { // Means any access will do debugMsg(debugsb.toString() + "...Check access allowed (any requested)"); ca.accessAllowed = ca.privileges.getAnyAllowed(); return ca; } for (int i = 0; i < how.length; i++) { char priv = ca.privileges.getPrivilege(how[i].getIndex()); if ((priv != allowed) && (priv != allowedInherited)) { if (debug) { debugsb.append("...Check access denied (!allowed) "); debugsb.append(ca.privileges); debugMsg(debugsb.toString()); } return ca; } } if (debug) { debugMsg(debugsb.toString() + "...Check access allowed"); } ca.accessAllowed = true; return ca; }
public CurrentAccess evaluateAccess(AccessPrincipal who, String owner, Privilege[] how, char[] acl, PrivilegeSet filter) throws AccessException { boolean authenticated = !who.getUnauthenticated(); boolean isOwner = false; CurrentAccess ca = new CurrentAccess(); ca.desiredAccess = how; ca.acl = this; decode(acl); if (authenticated) { isOwner = who.getAccount().equals(owner); } StringBuffer debugsb = null; if (debug) { debugsb = new StringBuffer("Check access for '"); debugsb.append(new String(acl)); debugsb.append("' with authenticated = "); debugsb.append(authenticated); debugsb.append(" isOwner = "); debugsb.append(isOwner); } getPrivileges: { if (!authenticated) { ca.privileges = Ace.findMergedPrivilege(this, null, Ace.whoTypeUnauthenticated); if (ca.privileges == null) { // All might be available ca.privileges = Ace.findMergedPrivilege(this, null, Ace.whoTypeAll); } break getPrivileges; } if (isOwner) { ca.privileges = Ace.findMergedPrivilege(this, null, Ace.whoTypeOwner); if (ca.privileges == null) { ca.privileges = PrivilegeSet.makeDefaultOwnerPrivileges(); } if (debug) { debugsb.append("... For owner got: " + ca.privileges); } break getPrivileges; } // Not owner - look for user ca.privileges = Ace.findMergedPrivilege(this, who.getAccount(), Ace.whoTypeUser); if (ca.privileges != null) { if (debug) { debugsb.append("... For user got: " + ca.privileges); } break getPrivileges; } // No specific user access - look for group access if (who.getGroupNames() != null) { for (String group: who.getGroupNames()) { if (debug) { debugsb.append("...Try access for group " + group); } PrivilegeSet privs = Ace.findMergedPrivilege(this, group, Ace.whoTypeGroup); if (privs != null) { ca.privileges = PrivilegeSet.mergePrivileges(ca.privileges, privs, false); } } } if (ca.privileges != null) { if (debug) { debugsb.append("...For groups got: " + ca.privileges); } break getPrivileges; } // "other" access set? 
ca.privileges = Ace.findMergedPrivilege(this, null, Ace.whoTypeOther); if (ca.privileges == null) { // All might be available ca.privileges = Ace.findMergedPrivilege(this, null, Ace.whoTypeAll); } if (ca.privileges != null) { if (debug) { debugsb.append("...For other got: " + ca.privileges); } break getPrivileges; } } // getPrivileges if (ca.privileges == null) { if (debug) { debugMsg(debugsb.toString() + "...Check access denied (noprivs)"); } return ca; } ca.privileges.setUnspecified(isOwner); if (filter != null) { ca.privileges.filterPrivileges(filter); } if (how.length == 0) { // Means any access will do if (debug) { debugMsg(debugsb.toString() + "...Check access allowed (any requested)"); } ca.accessAllowed = ca.privileges.getAnyAllowed(); return ca; } for (int i = 0; i < how.length; i++) { char priv = ca.privileges.getPrivilege(how[i].getIndex()); if ((priv != allowed) && (priv != allowedInherited)) { if (debug) { debugsb.append("...Check access denied (!allowed) "); debugsb.append(ca.privileges); debugMsg(debugsb.toString()); } return ca; } } if (debug) { debugMsg(debugsb.toString() + "...Check access allowed"); } ca.accessAllowed = true; return ca; }
diff --git a/src/com/android/calendar/agenda/AgendaWindowAdapter.java b/src/com/android/calendar/agenda/AgendaWindowAdapter.java index ac30998a..8ac14eb3 100644 --- a/src/com/android/calendar/agenda/AgendaWindowAdapter.java +++ b/src/com/android/calendar/agenda/AgendaWindowAdapter.java @@ -1,1349 +1,1352 @@ /* * Copyright (C) 2009 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.calendar.agenda; import android.content.AsyncQueryHandler; import android.content.ContentResolver; import android.content.ContentUris; import android.content.Context; import android.content.res.Resources; import android.database.Cursor; import android.net.Uri; import android.os.Handler; import android.provider.CalendarContract; import android.provider.CalendarContract.Attendees; import android.provider.CalendarContract.Calendars; import android.provider.CalendarContract.Instances; import android.text.format.DateUtils; import android.text.format.Time; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.widget.AbsListView.OnScrollListener; import android.widget.BaseAdapter; import android.widget.GridLayout; import android.widget.TextView; import com.android.calendar.CalendarController; import com.android.calendar.CalendarController.EventType; import com.android.calendar.CalendarController.ViewType; import com.android.calendar.R; 
import com.android.calendar.StickyHeaderListView; import com.android.calendar.Utils; import java.util.Formatter; import java.util.Iterator; import java.util.LinkedList; import java.util.Locale; import java.util.concurrent.ConcurrentLinkedQueue; /* Bugs Bugs Bugs: - At rotation and launch time, the initial position is not set properly. This code is calling listview.setSelection() in 2 rapid secessions but it dropped or didn't process the first one. - Scroll using trackball isn't repositioning properly after a new adapter is added. - Track ball clicks at the header/footer doesn't work. - Potential ping pong effect if the prefetch window is big and data is limited - Add index in calendar provider ToDo ToDo ToDo: Get design of header and footer from designer Make scrolling smoother. Test for correctness Loading speed Check for leaks and excessive allocations */ public class AgendaWindowAdapter extends BaseAdapter implements StickyHeaderListView.HeaderIndexer, StickyHeaderListView.HeaderHeightListener{ static final boolean BASICLOG = false; static final boolean DEBUGLOG = false; private static final String TAG = "AgendaWindowAdapter"; private static final String AGENDA_SORT_ORDER = CalendarContract.Instances.START_DAY + " ASC, " + CalendarContract.Instances.BEGIN + " ASC, " + CalendarContract.Events.TITLE + " ASC"; public static final int INDEX_INSTANCE_ID = 0; public static final int INDEX_TITLE = 1; public static final int INDEX_EVENT_LOCATION = 2; public static final int INDEX_ALL_DAY = 3; public static final int INDEX_HAS_ALARM = 4; public static final int INDEX_COLOR = 5; public static final int INDEX_RRULE = 6; public static final int INDEX_BEGIN = 7; public static final int INDEX_END = 8; public static final int INDEX_EVENT_ID = 9; public static final int INDEX_START_DAY = 10; public static final int INDEX_END_DAY = 11; public static final int INDEX_SELF_ATTENDEE_STATUS = 12; public static final int INDEX_ORGANIZER = 13; public static final int 
INDEX_OWNER_ACCOUNT = 14; public static final int INDEX_CAN_ORGANIZER_RESPOND= 15; public static final int INDEX_TIME_ZONE = 16; private static final String[] PROJECTION = new String[] { Instances._ID, // 0 Instances.TITLE, // 1 Instances.EVENT_LOCATION, // 2 Instances.ALL_DAY, // 3 Instances.HAS_ALARM, // 4 Instances.DISPLAY_COLOR, // 5 Instances.RRULE, // 6 Instances.BEGIN, // 7 Instances.END, // 8 Instances.EVENT_ID, // 9 Instances.START_DAY, // 10 Julian start day Instances.END_DAY, // 11 Julian end day Instances.SELF_ATTENDEE_STATUS, // 12 Instances.ORGANIZER, // 13 Instances.OWNER_ACCOUNT, // 14 Instances.CAN_ORGANIZER_RESPOND, // 15 Instances.EVENT_TIMEZONE, // 16 }; // Listview may have a bug where the index/position is not consistent when there's a header. // position == positionInListView - OFF_BY_ONE_BUG // TODO Need to look into this. private static final int OFF_BY_ONE_BUG = 1; private static final int MAX_NUM_OF_ADAPTERS = 5; private static final int IDEAL_NUM_OF_EVENTS = 50; private static final int MIN_QUERY_DURATION = 7; // days private static final int MAX_QUERY_DURATION = 60; // days private static final int PREFETCH_BOUNDARY = 1; /** Times to auto-expand/retry query after getting no data */ private static final int RETRIES_ON_NO_DATA = 1; private final Context mContext; private final Resources mResources; private final QueryHandler mQueryHandler; private final AgendaListView mAgendaListView; /** The sum of the rows in all the adapters */ private int mRowCount; /** The number of times we have queried and gotten no results back */ private int mEmptyCursorCount; /** Cached value of the last used adapter */ private DayAdapterInfo mLastUsedInfo; private final LinkedList<DayAdapterInfo> mAdapterInfos = new LinkedList<DayAdapterInfo>(); private final ConcurrentLinkedQueue<QuerySpec> mQueryQueue = new ConcurrentLinkedQueue<QuerySpec>(); private final TextView mHeaderView; private final TextView mFooterView; private boolean mDoneSettingUpHeaderFooter = 
false; private final boolean mIsTabletConfig; boolean mCleanQueryInitiated = false; private int mStickyHeaderSize = 44; // Initial size big enough for it to work /** * When the user scrolled to the top, a query will be made for older events * and this will be incremented. Don't make more requests if * mOlderRequests > mOlderRequestsProcessed. */ private int mOlderRequests; /** Number of "older" query that has been processed. */ private int mOlderRequestsProcessed; /** * When the user scrolled to the bottom, a query will be made for newer * events and this will be incremented. Don't make more requests if * mNewerRequests > mNewerRequestsProcessed. */ private int mNewerRequests; /** Number of "newer" query that has been processed. */ private int mNewerRequestsProcessed; // Note: Formatter is not thread safe. Fine for now as it is only used by the main thread. private final Formatter mFormatter; private final StringBuilder mStringBuilder; private String mTimeZone; // defines if to pop-up the current event when the agenda is first shown private final boolean mShowEventOnStart; private final Runnable mTZUpdater = new Runnable() { @Override public void run() { mTimeZone = Utils.getTimeZone(mContext, this); notifyDataSetChanged(); } }; private boolean mShuttingDown; private boolean mHideDeclined; // Used to stop a fling motion if the ListView is set to a specific position int mListViewScrollState = OnScrollListener.SCROLL_STATE_IDLE; /** The current search query, or null if none */ private String mSearchQuery; private long mSelectedInstanceId = -1; private final int mSelectedItemBackgroundColor; private final int mSelectedItemTextColor; private final float mItemRightMargin; // Types of Query private static final int QUERY_TYPE_OLDER = 0; // Query for older events private static final int QUERY_TYPE_NEWER = 1; // Query for newer events private static final int QUERY_TYPE_CLEAN = 2; // Delete everything and query around a date private static class QuerySpec { long 
queryStartMillis; Time goToTime; int start; int end; String searchQuery; int queryType; long id; public QuerySpec(int queryType) { this.queryType = queryType; id = -1; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + end; result = prime * result + (int) (queryStartMillis ^ (queryStartMillis >>> 32)); result = prime * result + queryType; result = prime * result + start; if (searchQuery != null) { result = prime * result + searchQuery.hashCode(); } if (goToTime != null) { long goToTimeMillis = goToTime.toMillis(false); result = prime * result + (int) (goToTimeMillis ^ (goToTimeMillis >>> 32)); } result = prime * result + (int)id; return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; QuerySpec other = (QuerySpec) obj; if (end != other.end || queryStartMillis != other.queryStartMillis || queryType != other.queryType || start != other.start || Utils.equals(searchQuery, other.searchQuery) || id != other.id) { return false; } if (goToTime != null) { if (goToTime.toMillis(false) != other.goToTime.toMillis(false)) { return false; } } else { if (other.goToTime != null) { return false; } } return true; } } static class EventInfo { long begin; long end; long id; int startDay; boolean allDay; } static class DayAdapterInfo { Cursor cursor; AgendaByDayAdapter dayAdapter; int start; // start day of the cursor's coverage int end; // end day of the cursor's coverage int offset; // offset in position in the list view int size; // dayAdapter.getCount() public DayAdapterInfo(Context context) { dayAdapter = new AgendaByDayAdapter(context); } @Override public String toString() { // Static class, so the time in this toString will not reflect the // home tz settings. This should only affect debugging. 
Time time = new Time(); StringBuilder sb = new StringBuilder(); time.setJulianDay(start); time.normalize(false); sb.append("Start:").append(time.toString()); time.setJulianDay(end); time.normalize(false); sb.append(" End:").append(time.toString()); sb.append(" Offset:").append(offset); sb.append(" Size:").append(size); return sb.toString(); } } public AgendaWindowAdapter(Context context, AgendaListView agendaListView, boolean showEventOnStart) { mContext = context; mResources = context.getResources(); mSelectedItemBackgroundColor = mResources .getColor(R.color.agenda_selected_background_color); mSelectedItemTextColor = mResources.getColor(R.color.agenda_selected_text_color); mItemRightMargin = mResources.getDimension(R.dimen.agenda_item_right_margin); mIsTabletConfig = Utils.getConfigBool(mContext, R.bool.tablet_config); mTimeZone = Utils.getTimeZone(context, mTZUpdater); mAgendaListView = agendaListView; mQueryHandler = new QueryHandler(context.getContentResolver()); mStringBuilder = new StringBuilder(50); mFormatter = new Formatter(mStringBuilder, Locale.getDefault()); mShowEventOnStart = showEventOnStart; // Implies there is no sticky header if (!mShowEventOnStart) { mStickyHeaderSize = 0; } mSearchQuery = null; LayoutInflater inflater = (LayoutInflater) context .getSystemService(Context.LAYOUT_INFLATER_SERVICE); mHeaderView = (TextView)inflater.inflate(R.layout.agenda_header_footer, null); mFooterView = (TextView)inflater.inflate(R.layout.agenda_header_footer, null); mHeaderView.setText(R.string.loading); mAgendaListView.addHeaderView(mHeaderView); } // Method in Adapter @Override public int getViewTypeCount() { return AgendaByDayAdapter.TYPE_LAST; } // Method in BaseAdapter @Override public boolean areAllItemsEnabled() { return false; } // Method in Adapter @Override public int getItemViewType(int position) { DayAdapterInfo info = getAdapterInfoByPosition(position); if (info != null) { return info.dayAdapter.getItemViewType(position - info.offset); } else { 
return -1; } } // Method in BaseAdapter @Override public boolean isEnabled(int position) { DayAdapterInfo info = getAdapterInfoByPosition(position); if (info != null) { return info.dayAdapter.isEnabled(position - info.offset); } else { return false; } } // Abstract Method in BaseAdapter public int getCount() { return mRowCount; } // Abstract Method in BaseAdapter public Object getItem(int position) { DayAdapterInfo info = getAdapterInfoByPosition(position); if (info != null) { return info.dayAdapter.getItem(position - info.offset); } else { return null; } } // Method in BaseAdapter @Override public boolean hasStableIds() { return true; } // Abstract Method in BaseAdapter @Override public long getItemId(int position) { DayAdapterInfo info = getAdapterInfoByPosition(position); if (info != null) { int curPos = info.dayAdapter.getCursorPosition(position - info.offset); if (curPos == Integer.MIN_VALUE) { return -1; } // Regular event if (curPos >= 0) { info.cursor.moveToPosition(curPos); return info.cursor.getLong(AgendaWindowAdapter.INDEX_EVENT_ID) << 20 + info.cursor.getLong(AgendaWindowAdapter.INDEX_BEGIN); } // Day Header return info.dayAdapter.findJulianDayFromPosition(position); } else { return -1; } } // Abstract Method in BaseAdapter public View getView(int position, View convertView, ViewGroup parent) { if (position >= (mRowCount - PREFETCH_BOUNDARY) && mNewerRequests <= mNewerRequestsProcessed) { if (DEBUGLOG) Log.e(TAG, "queryForNewerEvents: "); mNewerRequests++; queueQuery(new QuerySpec(QUERY_TYPE_NEWER)); } if (position < PREFETCH_BOUNDARY && mOlderRequests <= mOlderRequestsProcessed) { if (DEBUGLOG) Log.e(TAG, "queryForOlderEvents: "); mOlderRequests++; queueQuery(new QuerySpec(QUERY_TYPE_OLDER)); } final View v; DayAdapterInfo info = getAdapterInfoByPosition(position); if (info != null) { int offset = position - info.offset; v = info.dayAdapter.getView(offset, convertView, parent); // Turn on the past/present separator if the view is a day header // and 
it is the first day with events after yesterday. if (info.dayAdapter.isDayHeaderView(offset)) { View simpleDivider = v.findViewById(R.id.top_divider_simple); View pastPresentDivider = v.findViewById(R.id.top_divider_past_present); if (info.dayAdapter.isFirstDayAfterYesterday(offset)) { if (simpleDivider != null && pastPresentDivider != null) { simpleDivider.setVisibility(View.GONE); pastPresentDivider.setVisibility(View.VISIBLE); } } else if (simpleDivider != null && pastPresentDivider != null) { simpleDivider.setVisibility(View.VISIBLE); pastPresentDivider.setVisibility(View.GONE); } } } else { // TODO Log.e(TAG, "BUG: getAdapterInfoByPosition returned null!!! " + position); TextView tv = new TextView(mContext); tv.setText("Bug! " + position); v = tv; } // If this is not a tablet config don't do selection highlighting if (!mIsTabletConfig) { return v; } // Show selected marker if this is item is selected boolean selected = false; Object yy = v.getTag(); if (yy instanceof AgendaAdapter.ViewHolder) { AgendaAdapter.ViewHolder vh = (AgendaAdapter.ViewHolder) yy; selected = mSelectedInstanceId == vh.instanceId; vh.selectedMarker.setVisibility((selected && mShowEventOnStart) ? 
View.VISIBLE : View.GONE); if (mShowEventOnStart) { GridLayout.LayoutParams lp = (GridLayout.LayoutParams)vh.textContainer.getLayoutParams(); if (selected) { mSelectedVH = vh; v.setBackgroundColor(mSelectedItemBackgroundColor); vh.title.setTextColor(mSelectedItemTextColor); vh.when.setTextColor(mSelectedItemTextColor); vh.where.setTextColor(mSelectedItemTextColor); lp.setMargins(0, 0, 0, 0); vh.textContainer.setLayoutParams(lp); } else { lp.setMargins(0, 0, (int)mItemRightMargin, 0); vh.textContainer.setLayoutParams(lp); } } } if (DEBUGLOG) { Log.e(TAG, "getView " + position + " = " + getViewTitle(v)); } return v; } private AgendaAdapter.ViewHolder mSelectedVH = null; private int findEventPositionNearestTime(Time time, long id) { DayAdapterInfo info = getAdapterInfoByTime(time); int pos = -1; if (info != null) { pos = info.offset + info.dayAdapter.findEventPositionNearestTime(time, id); } if (DEBUGLOG) Log.e(TAG, "findEventPositionNearestTime " + time + " id:" + id + " =" + pos); return pos; } protected DayAdapterInfo getAdapterInfoByPosition(int position) { synchronized (mAdapterInfos) { if (mLastUsedInfo != null && mLastUsedInfo.offset <= position && position < (mLastUsedInfo.offset + mLastUsedInfo.size)) { return mLastUsedInfo; } for (DayAdapterInfo info : mAdapterInfos) { if (info.offset <= position && position < (info.offset + info.size)) { mLastUsedInfo = info; return info; } } } return null; } private DayAdapterInfo getAdapterInfoByTime(Time time) { if (DEBUGLOG) Log.e(TAG, "getAdapterInfoByTime " + time.toString()); Time tmpTime = new Time(time); long timeInMillis = tmpTime.normalize(true); int day = Time.getJulianDay(timeInMillis, tmpTime.gmtoff); synchronized (mAdapterInfos) { for (DayAdapterInfo info : mAdapterInfos) { if (info.start <= day && day <= info.end) { return info; } } } return null; } public EventInfo getEventByPosition(final int positionInListView) { return getEventByPosition(positionInListView, true); } /** * Return the event info for a 
given position in the adapter * @param positionInListView * @param returnEventStartDay If true, return actual event startday. Otherwise * return agenda date-header date as the startDay. * The two will differ for multi-day events after the first day. * @return */ public EventInfo getEventByPosition(final int positionInListView, boolean returnEventStartDay) { if (DEBUGLOG) Log.e(TAG, "getEventByPosition " + positionInListView); if (positionInListView < 0) { return null; } final int positionInAdapter = positionInListView - OFF_BY_ONE_BUG; DayAdapterInfo info = getAdapterInfoByPosition(positionInAdapter); if (info == null) { return null; } int cursorPosition = info.dayAdapter.getCursorPosition(positionInAdapter - info.offset); if (cursorPosition == Integer.MIN_VALUE) { return null; } boolean isDayHeader = false; if (cursorPosition < 0) { cursorPosition = -cursorPosition; isDayHeader = true; } if (cursorPosition < info.cursor.getCount()) { EventInfo ei = buildEventInfoFromCursor(info.cursor, cursorPosition, isDayHeader); if (!returnEventStartDay && !isDayHeader) { ei.startDay = info.dayAdapter.findJulianDayFromPosition(positionInAdapter - info.offset); } return ei; } return null; } private EventInfo buildEventInfoFromCursor(final Cursor cursor, int cursorPosition, boolean isDayHeader) { if (cursorPosition == -1) { cursor.moveToFirst(); } else { cursor.moveToPosition(cursorPosition); } EventInfo event = new EventInfo(); event.begin = cursor.getLong(AgendaWindowAdapter.INDEX_BEGIN); event.end = cursor.getLong(AgendaWindowAdapter.INDEX_END); event.startDay = cursor.getInt(AgendaWindowAdapter.INDEX_START_DAY); event.allDay = cursor.getInt(AgendaWindowAdapter.INDEX_ALL_DAY) != 0; if (event.allDay) { // UTC Time time = new Time(mTimeZone); time.setJulianDay(Time.getJulianDay(event.begin, 0)); event.begin = time.toMillis(false /* use isDst */); } else if (isDayHeader) { // Trim to midnight. 
Time time = new Time(mTimeZone); time.set(event.begin); time.hour = 0; time.minute = 0; time.second = 0; event.begin = time.toMillis(false /* use isDst */); } if (!isDayHeader) { if (event.allDay) { Time time = new Time(mTimeZone); time.setJulianDay(Time.getJulianDay(event.end, 0)); event.end = time.toMillis(false /* use isDst */); } else { event.end = cursor.getLong(AgendaWindowAdapter.INDEX_END); } event.id = cursor.getLong(AgendaWindowAdapter.INDEX_EVENT_ID); } return event; } public void refresh(Time goToTime, long id, String searchQuery, boolean forced, boolean refreshEventInfo) { if (searchQuery != null) { mSearchQuery = searchQuery; } if (DEBUGLOG) { Log.e(TAG, this + ": refresh " + goToTime.toString() + " id " + id + ((searchQuery != null) ? searchQuery : "") + (forced ? " forced" : " not forced") + (refreshEventInfo ? " refresh event info" : "")); } int startDay = Time.getJulianDay(goToTime.toMillis(false), goToTime.gmtoff); if (!forced && isInRange(startDay, startDay)) { // No need to re-query if (!mAgendaListView.isEventVisible(goToTime, id)) { int gotoPosition = findEventPositionNearestTime(goToTime, id); if (gotoPosition > 0) { mAgendaListView.setSelectionFromTop(gotoPosition + OFF_BY_ONE_BUG, mStickyHeaderSize); if (mListViewScrollState == OnScrollListener.SCROLL_STATE_FLING) { mAgendaListView.smoothScrollBy(0, 0); } if (refreshEventInfo) { long newInstanceId = findInstanceIdFromPosition(gotoPosition); if (newInstanceId != getSelectedInstanceId()) { setSelectedInstanceId(newInstanceId); new Handler().post(new Runnable() { @Override public void run() { notifyDataSetChanged(); } }); Cursor tempCursor = getCursorByPosition(gotoPosition); if (tempCursor != null) { int tempCursorPosition = getCursorPositionByPosition(gotoPosition); EventInfo event = buildEventInfoFromCursor(tempCursor, tempCursorPosition, false); CalendarController.getInstance(mContext) .sendEventRelatedEventWithExtra(this, EventType.VIEW_EVENT, event.id, event.begin, event.end, 0, 0, 
CalendarController.EventInfo.buildViewExtraLong( Attendees.ATTENDEE_STATUS_NONE, event.allDay), -1); } } } } Time actualTime = new Time(mTimeZone); actualTime.set(goToTime); CalendarController.getInstance(mContext).sendEvent(this, EventType.UPDATE_TITLE, actualTime, actualTime, -1, ViewType.CURRENT); } return; } // If AllInOneActivity is sending a second GOTO event(in OnResume), ignore it. if (!mCleanQueryInitiated || searchQuery != null) { // Query for a total of MIN_QUERY_DURATION days int endDay = startDay + MIN_QUERY_DURATION; mSelectedInstanceId = -1; mCleanQueryInitiated = true; queueQuery(startDay, endDay, goToTime, searchQuery, QUERY_TYPE_CLEAN, id); // Pre-fetch more data to overcome a race condition in AgendaListView.shiftSelection // Queuing more data with the goToTime set to the selected time skips the call to // shiftSelection on refresh. mOlderRequests++; queueQuery(0, 0, goToTime, searchQuery, QUERY_TYPE_OLDER, id); mNewerRequests++; queueQuery(0, 0, goToTime, searchQuery, QUERY_TYPE_NEWER, id); } } public void close() { mShuttingDown = true; pruneAdapterInfo(QUERY_TYPE_CLEAN); if (mQueryHandler != null) { mQueryHandler.cancelOperation(0); } } private DayAdapterInfo pruneAdapterInfo(int queryType) { synchronized (mAdapterInfos) { DayAdapterInfo recycleMe = null; if (!mAdapterInfos.isEmpty()) { if (mAdapterInfos.size() >= MAX_NUM_OF_ADAPTERS) { if (queryType == QUERY_TYPE_NEWER) { recycleMe = mAdapterInfos.removeFirst(); } else if (queryType == QUERY_TYPE_OLDER) { recycleMe = mAdapterInfos.removeLast(); // Keep the size only if the oldest items are removed. recycleMe.size = 0; } if (recycleMe != null) { if (recycleMe.cursor != null) { recycleMe.cursor.close(); } return recycleMe; } } if (mRowCount == 0 || queryType == QUERY_TYPE_CLEAN) { mRowCount = 0; int deletedRows = 0; DayAdapterInfo info; do { info = mAdapterInfos.poll(); if (info != null) { // TODO the following causes ANR's. Do this in a thread. 
info.cursor.close(); deletedRows += info.size; recycleMe = info; } } while (info != null); if (recycleMe != null) { recycleMe.cursor = null; recycleMe.size = deletedRows; } } } return recycleMe; } } private String buildQuerySelection() { // Respect the preference to show/hide declined events if (mHideDeclined) { return Calendars.VISIBLE + "=1 AND " + Instances.SELF_ATTENDEE_STATUS + "!=" + Attendees.ATTENDEE_STATUS_DECLINED; } else { return Calendars.VISIBLE + "=1"; } } private Uri buildQueryUri(int start, int end, String searchQuery) { Uri rootUri = searchQuery == null ? Instances.CONTENT_BY_DAY_URI : Instances.CONTENT_SEARCH_BY_DAY_URI; Uri.Builder builder = rootUri.buildUpon(); ContentUris.appendId(builder, start); ContentUris.appendId(builder, end); if (searchQuery != null) { builder.appendPath(searchQuery); } return builder.build(); } private boolean isInRange(int start, int end) { synchronized (mAdapterInfos) { if (mAdapterInfos.isEmpty()) { return false; } return mAdapterInfos.getFirst().start <= start && end <= mAdapterInfos.getLast().end; } } private int calculateQueryDuration(int start, int end) { int queryDuration = MAX_QUERY_DURATION; if (mRowCount != 0) { queryDuration = IDEAL_NUM_OF_EVENTS * (end - start + 1) / mRowCount; } if (queryDuration > MAX_QUERY_DURATION) { queryDuration = MAX_QUERY_DURATION; } else if (queryDuration < MIN_QUERY_DURATION) { queryDuration = MIN_QUERY_DURATION; } return queryDuration; } private boolean queueQuery(int start, int end, Time goToTime, String searchQuery, int queryType, long id) { QuerySpec queryData = new QuerySpec(queryType); queryData.goToTime = goToTime; queryData.start = start; queryData.end = end; queryData.searchQuery = searchQuery; queryData.id = id; return queueQuery(queryData); } private boolean queueQuery(QuerySpec queryData) { queryData.searchQuery = mSearchQuery; Boolean queuedQuery; synchronized (mQueryQueue) { queuedQuery = false; Boolean doQueryNow = mQueryQueue.isEmpty(); mQueryQueue.add(queryData); 
queuedQuery = true; if (doQueryNow) { doQuery(queryData); } } return queuedQuery; } private void doQuery(QuerySpec queryData) { if (!mAdapterInfos.isEmpty()) { int start = mAdapterInfos.getFirst().start; int end = mAdapterInfos.getLast().end; int queryDuration = calculateQueryDuration(start, end); switch(queryData.queryType) { case QUERY_TYPE_OLDER: queryData.end = start - 1; queryData.start = queryData.end - queryDuration; break; case QUERY_TYPE_NEWER: queryData.start = end + 1; queryData.end = queryData.start + queryDuration; break; } // By "compacting" cursors, this fixes the disco/ping-pong problem // b/5311977 if (mRowCount < 20 && queryData.queryType != QUERY_TYPE_CLEAN) { if (DEBUGLOG) { Log.e(TAG, "Compacting cursor: mRowCount=" + mRowCount + " totalStart:" + start + " totalEnd:" + end + " query.start:" + queryData.start + " query.end:" + queryData.end); } queryData.queryType = QUERY_TYPE_CLEAN; if (queryData.start > start) { queryData.start = start; } if (queryData.end < end) { queryData.end = end; } } } if (BASICLOG) { Time time = new Time(mTimeZone); time.setJulianDay(queryData.start); Time time2 = new Time(mTimeZone); time2.setJulianDay(queryData.end); Log.v(TAG, "startQuery: " + time.toString() + " to " + time2.toString() + " then go to " + queryData.goToTime); } mQueryHandler.cancelOperation(0); if (BASICLOG) queryData.queryStartMillis = System.nanoTime(); Uri queryUri = buildQueryUri( queryData.start, queryData.end, queryData.searchQuery); mQueryHandler.startQuery(0, queryData, queryUri, PROJECTION, buildQuerySelection(), null, AGENDA_SORT_ORDER); } private String formatDateString(int julianDay) { Time time = new Time(mTimeZone); time.setJulianDay(julianDay); long millis = time.toMillis(false); mStringBuilder.setLength(0); return DateUtils.formatDateRange(mContext, mFormatter, millis, millis, DateUtils.FORMAT_SHOW_YEAR | DateUtils.FORMAT_SHOW_DATE | DateUtils.FORMAT_ABBREV_MONTH, mTimeZone).toString(); } private void updateHeaderFooter(final int 
start, final int end) { mHeaderView.setText(mContext.getString(R.string.show_older_events, formatDateString(start))); mFooterView.setText(mContext.getString(R.string.show_newer_events, formatDateString(end))); } private class QueryHandler extends AsyncQueryHandler { public QueryHandler(ContentResolver cr) { super(cr); } @Override protected void onQueryComplete(int token, Object cookie, Cursor cursor) { QuerySpec data = (QuerySpec)cookie; if (BASICLOG) { long queryEndMillis = System.nanoTime(); Log.e(TAG, "Query time(ms): " + (queryEndMillis - data.queryStartMillis) / 1000000 + " Count: " + cursor.getCount()); } if (data.queryType == QUERY_TYPE_CLEAN) { mCleanQueryInitiated = false; } if (mShuttingDown) { cursor.close(); return; } // Notify Listview of changes and update position int cursorSize = cursor.getCount(); if (cursorSize > 0 || mAdapterInfos.isEmpty() || data.queryType == QUERY_TYPE_CLEAN) { final int listPositionOffset = processNewCursor(data, cursor); int newPosition = -1; if (data.goToTime == null) { // Typical Scrolling type query notifyDataSetChanged(); if (listPositionOffset != 0) { mAgendaListView.shiftSelection(listPositionOffset); } } else { // refresh() called. 
Go to the designated position final Time goToTime = data.goToTime; notifyDataSetChanged(); newPosition = findEventPositionNearestTime(goToTime, data.id); if (newPosition >= 0) { if (mListViewScrollState == OnScrollListener.SCROLL_STATE_FLING) { mAgendaListView.smoothScrollBy(0, 0); } mAgendaListView.setSelectionFromTop(newPosition + OFF_BY_ONE_BUG, mStickyHeaderSize); Time actualTime = new Time(mTimeZone); actualTime.set(goToTime); CalendarController.getInstance(mContext).sendEvent(this, EventType.UPDATE_TITLE, actualTime, actualTime, -1, ViewType.CURRENT); } if (DEBUGLOG) { Log.e(TAG, "Setting listview to " + "findEventPositionNearestTime: " + (newPosition + OFF_BY_ONE_BUG)); } } // Make sure we change the selected instance Id only on a clean query and we // do not have one set already if (mSelectedInstanceId == -1 && newPosition != -1 && data.queryType == QUERY_TYPE_CLEAN) { if (data.id != -1 || data.goToTime != null) { mSelectedInstanceId = findInstanceIdFromPosition(newPosition); } } // size == 1 means a fresh query. Possibly after the data changed. // Let's check whether mSelectedInstanceId is still valid. if (mAdapterInfos.size() == 1 && mSelectedInstanceId != -1) { boolean found = false; cursor.moveToPosition(-1); while (cursor.moveToNext()) { if (mSelectedInstanceId == cursor .getLong(AgendaWindowAdapter.INDEX_INSTANCE_ID)) { found = true; break; } }; if (!found) { mSelectedInstanceId = -1; } } // Show the requested event if (mShowEventOnStart && data.queryType == QUERY_TYPE_CLEAN) { Cursor tempCursor = null; int tempCursorPosition = -1; // If no valid event is selected , just pick the first one if (mSelectedInstanceId == -1) { if (cursor.moveToFirst()) { mSelectedInstanceId = cursor .getLong(AgendaWindowAdapter.INDEX_INSTANCE_ID); // Set up a dummy view holder so we have the right all day // info when the view is created. // TODO determine the full set of what might be useful to // know about the selected view and fill it in. 
mSelectedVH = new AgendaAdapter.ViewHolder(); mSelectedVH.allDay = cursor.getInt(AgendaWindowAdapter.INDEX_ALL_DAY) != 0; tempCursor = cursor; } } else if (newPosition != -1) { tempCursor = getCursorByPosition(newPosition); tempCursorPosition = getCursorPositionByPosition(newPosition); } if (tempCursor != null) { EventInfo event = buildEventInfoFromCursor(tempCursor, tempCursorPosition, false); CalendarController.getInstance(mContext).sendEventRelatedEventWithExtra( this, EventType.VIEW_EVENT, event.id, event.begin, event.end, 0, 0, CalendarController.EventInfo.buildViewExtraLong( Attendees.ATTENDEE_STATUS_NONE, event.allDay), -1); } } } else { cursor.close(); } // Update header and footer if (!mDoneSettingUpHeaderFooter) { OnClickListener headerFooterOnClickListener = new OnClickListener() { public void onClick(View v) { if (v == mHeaderView) { queueQuery(new QuerySpec(QUERY_TYPE_OLDER)); } else { queueQuery(new QuerySpec(QUERY_TYPE_NEWER)); } }}; mHeaderView.setOnClickListener(headerFooterOnClickListener); mFooterView.setOnClickListener(headerFooterOnClickListener); mAgendaListView.addFooterView(mFooterView); mDoneSettingUpHeaderFooter = true; } synchronized (mQueryQueue) { int totalAgendaRangeStart = -1; int totalAgendaRangeEnd = -1; if (cursorSize != 0) { // Remove the query that just completed QuerySpec x = mQueryQueue.poll(); if (BASICLOG && !x.equals(data)) { Log.e(TAG, "onQueryComplete - cookie != head of queue"); } mEmptyCursorCount = 0; if (data.queryType == QUERY_TYPE_NEWER) { mNewerRequestsProcessed++; } else if (data.queryType == QUERY_TYPE_OLDER) { mOlderRequestsProcessed++; } totalAgendaRangeStart = mAdapterInfos.getFirst().start; totalAgendaRangeEnd = mAdapterInfos.getLast().end; } else { // CursorSize == 0 QuerySpec querySpec = mQueryQueue.peek(); // Update Adapter Info with new start and end date range if (!mAdapterInfos.isEmpty()) { DayAdapterInfo first = mAdapterInfos.getFirst(); DayAdapterInfo last = mAdapterInfos.getLast(); if (first.start - 1 
<= querySpec.end && querySpec.start < first.start) { first.start = querySpec.start; } if (querySpec.start <= last.end + 1 && last.end < querySpec.end) { last.end = querySpec.end; } totalAgendaRangeStart = first.start; totalAgendaRangeEnd = last.end; } else { totalAgendaRangeStart = querySpec.start; totalAgendaRangeEnd = querySpec.end; } // Update query specification with expanded search range // and maybe rerun query switch (querySpec.queryType) { case QUERY_TYPE_OLDER: totalAgendaRangeStart = querySpec.start; querySpec.start -= MAX_QUERY_DURATION; break; case QUERY_TYPE_NEWER: totalAgendaRangeEnd = querySpec.end; querySpec.end += MAX_QUERY_DURATION; break; case QUERY_TYPE_CLEAN: totalAgendaRangeStart = querySpec.start; totalAgendaRangeEnd = querySpec.end; querySpec.start -= MAX_QUERY_DURATION / 2; querySpec.end += MAX_QUERY_DURATION / 2; break; } if (++mEmptyCursorCount > RETRIES_ON_NO_DATA) { // Nothing in the cursor again. Dropping query mQueryQueue.poll(); } } updateHeaderFooter(totalAgendaRangeStart, totalAgendaRangeEnd); // Go over the events and mark the first day after yesterday // that has events in it + // If the range of adapters doesn't include yesterday, skip marking it since it will + // mark the first day in the adapters. 
synchronized (mAdapterInfos) { DayAdapterInfo info = mAdapterInfos.getFirst(); - if (info != null) { - Time time = new Time(mTimeZone); - long now = System.currentTimeMillis(); - time.set(now); - int JulianToday = Time.getJulianDay(now, time.gmtoff); + Time time = new Time(mTimeZone); + long now = System.currentTimeMillis(); + time.set(now); + int JulianToday = Time.getJulianDay(now, time.gmtoff); + if (info != null && JulianToday >= info.start && JulianToday + <= mAdapterInfos.getLast().end) { Iterator<DayAdapterInfo> iter = mAdapterInfos.iterator(); boolean foundDay = false; while (iter.hasNext() && !foundDay) { info = iter.next(); for (int i = 0; i < info.size; i++) { if (info.dayAdapter.findJulianDayFromPosition(i) >= JulianToday) { info.dayAdapter.setAsFirstDayAfterYesterday(i); foundDay = true; break; } } } } } // Fire off the next query if any Iterator<QuerySpec> it = mQueryQueue.iterator(); while (it.hasNext()) { QuerySpec queryData = it.next(); if (queryData.queryType == QUERY_TYPE_CLEAN || !isInRange(queryData.start, queryData.end)) { // Query accepted if (DEBUGLOG) Log.e(TAG, "Query accepted. QueueSize:" + mQueryQueue.size()); doQuery(queryData); break; } else { // Query rejected it.remove(); if (DEBUGLOG) Log.e(TAG, "Query rejected. QueueSize:" + mQueryQueue.size()); } } } if (BASICLOG) { for (DayAdapterInfo info3 : mAdapterInfos) { Log.e(TAG, "> " + info3.toString()); } } } /* * Update the adapter info array with a the new cursor. Close out old * cursors as needed. 
* * @return number of rows removed from the beginning */ private int processNewCursor(QuerySpec data, Cursor cursor) { synchronized (mAdapterInfos) { // Remove adapter info's from adapterInfos as needed DayAdapterInfo info = pruneAdapterInfo(data.queryType); int listPositionOffset = 0; if (info == null) { info = new DayAdapterInfo(mContext); } else { if (DEBUGLOG) Log.e(TAG, "processNewCursor listPositionOffsetA=" + -info.size); listPositionOffset = -info.size; } // Setup adapter info info.start = data.start; info.end = data.end; info.cursor = cursor; info.dayAdapter.changeCursor(info); info.size = info.dayAdapter.getCount(); // Insert into adapterInfos if (mAdapterInfos.isEmpty() || data.end <= mAdapterInfos.getFirst().start) { mAdapterInfos.addFirst(info); listPositionOffset += info.size; } else if (BASICLOG && data.start < mAdapterInfos.getLast().end) { mAdapterInfos.addLast(info); for (DayAdapterInfo info2 : mAdapterInfos) { Log.e("========== BUG ==", info2.toString()); } } else { mAdapterInfos.addLast(info); } // Update offsets in adapterInfos mRowCount = 0; for (DayAdapterInfo info3 : mAdapterInfos) { info3.offset = mRowCount; mRowCount += info3.size; } mLastUsedInfo = null; return listPositionOffset; } } } static String getViewTitle(View x) { String title = ""; if (x != null) { Object yy = x.getTag(); if (yy instanceof AgendaAdapter.ViewHolder) { TextView tv = ((AgendaAdapter.ViewHolder) yy).title; if (tv != null) { title = (String) tv.getText(); } } else if (yy != null) { TextView dateView = ((AgendaByDayAdapter.ViewHolder) yy).dateView; if (dateView != null) { title = (String) dateView.getText(); } } } return title; } public void onResume() { mTZUpdater.run(); } public void setHideDeclinedEvents(boolean hideDeclined) { mHideDeclined = hideDeclined; } public void setSelectedView(View v) { if (v != null) { Object vh = v.getTag(); if (vh instanceof AgendaAdapter.ViewHolder) { mSelectedVH = (AgendaAdapter.ViewHolder) vh; if (mSelectedInstanceId != 
mSelectedVH.instanceId) { mSelectedInstanceId = mSelectedVH.instanceId; notifyDataSetChanged(); } } } } public AgendaAdapter.ViewHolder getSelectedViewHolder() { return mSelectedVH; } public long getSelectedInstanceId() { return mSelectedInstanceId; } public void setSelectedInstanceId(long selectedInstanceId) { mSelectedInstanceId = selectedInstanceId; mSelectedVH = null; } private long findInstanceIdFromPosition(int position) { DayAdapterInfo info = getAdapterInfoByPosition(position); if (info != null) { return info.dayAdapter.getInstanceId(position - info.offset); } return -1; } private Cursor getCursorByPosition(int position) { DayAdapterInfo info = getAdapterInfoByPosition(position); if (info != null) { return info.cursor; } return null; } private int getCursorPositionByPosition(int position) { DayAdapterInfo info = getAdapterInfoByPosition(position); if (info != null) { return info.dayAdapter.getCursorPosition(position - info.offset); } return -1; } // Implementation of HeaderIndexer interface for StickyHeeaderListView // Returns the location of the day header of a specific event specified in the position // in the adapter @Override public int getHeaderPositionFromItemPosition(int position) { // For phone configuration, return -1 so there will be no sticky header if (!mIsTabletConfig) { return -1; } DayAdapterInfo info = getAdapterInfoByPosition(position); if (info != null) { int pos = info.dayAdapter.getHeaderPosition(position - info.offset); return (pos != -1)?(pos + info.offset):-1; } return -1; } // Returns the number of events for a specific day header @Override public int getHeaderItemsNumber(int headerPosition) { if (headerPosition < 0 || !mIsTabletConfig) { return -1; } DayAdapterInfo info = getAdapterInfoByPosition(headerPosition); if (info != null) { return info.dayAdapter.getHeaderItemsCount(headerPosition - info.offset); } return -1; } @Override public void OnHeaderHeightChanged(int height) { mStickyHeaderSize = height; } public int 
getStickyHeaderHeight() { return mStickyHeaderSize; } public void setScrollState(int state) { mListViewScrollState = state; } }
false
true
protected void onQueryComplete(int token, Object cookie, Cursor cursor) { QuerySpec data = (QuerySpec)cookie; if (BASICLOG) { long queryEndMillis = System.nanoTime(); Log.e(TAG, "Query time(ms): " + (queryEndMillis - data.queryStartMillis) / 1000000 + " Count: " + cursor.getCount()); } if (data.queryType == QUERY_TYPE_CLEAN) { mCleanQueryInitiated = false; } if (mShuttingDown) { cursor.close(); return; } // Notify Listview of changes and update position int cursorSize = cursor.getCount(); if (cursorSize > 0 || mAdapterInfos.isEmpty() || data.queryType == QUERY_TYPE_CLEAN) { final int listPositionOffset = processNewCursor(data, cursor); int newPosition = -1; if (data.goToTime == null) { // Typical Scrolling type query notifyDataSetChanged(); if (listPositionOffset != 0) { mAgendaListView.shiftSelection(listPositionOffset); } } else { // refresh() called. Go to the designated position final Time goToTime = data.goToTime; notifyDataSetChanged(); newPosition = findEventPositionNearestTime(goToTime, data.id); if (newPosition >= 0) { if (mListViewScrollState == OnScrollListener.SCROLL_STATE_FLING) { mAgendaListView.smoothScrollBy(0, 0); } mAgendaListView.setSelectionFromTop(newPosition + OFF_BY_ONE_BUG, mStickyHeaderSize); Time actualTime = new Time(mTimeZone); actualTime.set(goToTime); CalendarController.getInstance(mContext).sendEvent(this, EventType.UPDATE_TITLE, actualTime, actualTime, -1, ViewType.CURRENT); } if (DEBUGLOG) { Log.e(TAG, "Setting listview to " + "findEventPositionNearestTime: " + (newPosition + OFF_BY_ONE_BUG)); } } // Make sure we change the selected instance Id only on a clean query and we // do not have one set already if (mSelectedInstanceId == -1 && newPosition != -1 && data.queryType == QUERY_TYPE_CLEAN) { if (data.id != -1 || data.goToTime != null) { mSelectedInstanceId = findInstanceIdFromPosition(newPosition); } } // size == 1 means a fresh query. Possibly after the data changed. // Let's check whether mSelectedInstanceId is still valid. 
if (mAdapterInfos.size() == 1 && mSelectedInstanceId != -1) { boolean found = false; cursor.moveToPosition(-1); while (cursor.moveToNext()) { if (mSelectedInstanceId == cursor .getLong(AgendaWindowAdapter.INDEX_INSTANCE_ID)) { found = true; break; } }; if (!found) { mSelectedInstanceId = -1; } } // Show the requested event if (mShowEventOnStart && data.queryType == QUERY_TYPE_CLEAN) { Cursor tempCursor = null; int tempCursorPosition = -1; // If no valid event is selected , just pick the first one if (mSelectedInstanceId == -1) { if (cursor.moveToFirst()) { mSelectedInstanceId = cursor .getLong(AgendaWindowAdapter.INDEX_INSTANCE_ID); // Set up a dummy view holder so we have the right all day // info when the view is created. // TODO determine the full set of what might be useful to // know about the selected view and fill it in. mSelectedVH = new AgendaAdapter.ViewHolder(); mSelectedVH.allDay = cursor.getInt(AgendaWindowAdapter.INDEX_ALL_DAY) != 0; tempCursor = cursor; } } else if (newPosition != -1) { tempCursor = getCursorByPosition(newPosition); tempCursorPosition = getCursorPositionByPosition(newPosition); } if (tempCursor != null) { EventInfo event = buildEventInfoFromCursor(tempCursor, tempCursorPosition, false); CalendarController.getInstance(mContext).sendEventRelatedEventWithExtra( this, EventType.VIEW_EVENT, event.id, event.begin, event.end, 0, 0, CalendarController.EventInfo.buildViewExtraLong( Attendees.ATTENDEE_STATUS_NONE, event.allDay), -1); } } } else { cursor.close(); } // Update header and footer if (!mDoneSettingUpHeaderFooter) { OnClickListener headerFooterOnClickListener = new OnClickListener() { public void onClick(View v) { if (v == mHeaderView) { queueQuery(new QuerySpec(QUERY_TYPE_OLDER)); } else { queueQuery(new QuerySpec(QUERY_TYPE_NEWER)); } }}; mHeaderView.setOnClickListener(headerFooterOnClickListener); mFooterView.setOnClickListener(headerFooterOnClickListener); mAgendaListView.addFooterView(mFooterView); mDoneSettingUpHeaderFooter = 
true; } synchronized (mQueryQueue) { int totalAgendaRangeStart = -1; int totalAgendaRangeEnd = -1; if (cursorSize != 0) { // Remove the query that just completed QuerySpec x = mQueryQueue.poll(); if (BASICLOG && !x.equals(data)) { Log.e(TAG, "onQueryComplete - cookie != head of queue"); } mEmptyCursorCount = 0; if (data.queryType == QUERY_TYPE_NEWER) { mNewerRequestsProcessed++; } else if (data.queryType == QUERY_TYPE_OLDER) { mOlderRequestsProcessed++; } totalAgendaRangeStart = mAdapterInfos.getFirst().start; totalAgendaRangeEnd = mAdapterInfos.getLast().end; } else { // CursorSize == 0 QuerySpec querySpec = mQueryQueue.peek(); // Update Adapter Info with new start and end date range if (!mAdapterInfos.isEmpty()) { DayAdapterInfo first = mAdapterInfos.getFirst(); DayAdapterInfo last = mAdapterInfos.getLast(); if (first.start - 1 <= querySpec.end && querySpec.start < first.start) { first.start = querySpec.start; } if (querySpec.start <= last.end + 1 && last.end < querySpec.end) { last.end = querySpec.end; } totalAgendaRangeStart = first.start; totalAgendaRangeEnd = last.end; } else { totalAgendaRangeStart = querySpec.start; totalAgendaRangeEnd = querySpec.end; } // Update query specification with expanded search range // and maybe rerun query switch (querySpec.queryType) { case QUERY_TYPE_OLDER: totalAgendaRangeStart = querySpec.start; querySpec.start -= MAX_QUERY_DURATION; break; case QUERY_TYPE_NEWER: totalAgendaRangeEnd = querySpec.end; querySpec.end += MAX_QUERY_DURATION; break; case QUERY_TYPE_CLEAN: totalAgendaRangeStart = querySpec.start; totalAgendaRangeEnd = querySpec.end; querySpec.start -= MAX_QUERY_DURATION / 2; querySpec.end += MAX_QUERY_DURATION / 2; break; } if (++mEmptyCursorCount > RETRIES_ON_NO_DATA) { // Nothing in the cursor again. 
Dropping query mQueryQueue.poll(); } } updateHeaderFooter(totalAgendaRangeStart, totalAgendaRangeEnd); // Go over the events and mark the first day after yesterday // that has events in it synchronized (mAdapterInfos) { DayAdapterInfo info = mAdapterInfos.getFirst(); if (info != null) { Time time = new Time(mTimeZone); long now = System.currentTimeMillis(); time.set(now); int JulianToday = Time.getJulianDay(now, time.gmtoff); Iterator<DayAdapterInfo> iter = mAdapterInfos.iterator(); boolean foundDay = false; while (iter.hasNext() && !foundDay) { info = iter.next(); for (int i = 0; i < info.size; i++) { if (info.dayAdapter.findJulianDayFromPosition(i) >= JulianToday) { info.dayAdapter.setAsFirstDayAfterYesterday(i); foundDay = true; break; } } } } } // Fire off the next query if any Iterator<QuerySpec> it = mQueryQueue.iterator(); while (it.hasNext()) { QuerySpec queryData = it.next(); if (queryData.queryType == QUERY_TYPE_CLEAN || !isInRange(queryData.start, queryData.end)) { // Query accepted if (DEBUGLOG) Log.e(TAG, "Query accepted. QueueSize:" + mQueryQueue.size()); doQuery(queryData); break; } else { // Query rejected it.remove(); if (DEBUGLOG) Log.e(TAG, "Query rejected. QueueSize:" + mQueryQueue.size()); } } } if (BASICLOG) { for (DayAdapterInfo info3 : mAdapterInfos) { Log.e(TAG, "> " + info3.toString()); } } }
protected void onQueryComplete(int token, Object cookie, Cursor cursor) { QuerySpec data = (QuerySpec)cookie; if (BASICLOG) { long queryEndMillis = System.nanoTime(); Log.e(TAG, "Query time(ms): " + (queryEndMillis - data.queryStartMillis) / 1000000 + " Count: " + cursor.getCount()); } if (data.queryType == QUERY_TYPE_CLEAN) { mCleanQueryInitiated = false; } if (mShuttingDown) { cursor.close(); return; } // Notify Listview of changes and update position int cursorSize = cursor.getCount(); if (cursorSize > 0 || mAdapterInfos.isEmpty() || data.queryType == QUERY_TYPE_CLEAN) { final int listPositionOffset = processNewCursor(data, cursor); int newPosition = -1; if (data.goToTime == null) { // Typical Scrolling type query notifyDataSetChanged(); if (listPositionOffset != 0) { mAgendaListView.shiftSelection(listPositionOffset); } } else { // refresh() called. Go to the designated position final Time goToTime = data.goToTime; notifyDataSetChanged(); newPosition = findEventPositionNearestTime(goToTime, data.id); if (newPosition >= 0) { if (mListViewScrollState == OnScrollListener.SCROLL_STATE_FLING) { mAgendaListView.smoothScrollBy(0, 0); } mAgendaListView.setSelectionFromTop(newPosition + OFF_BY_ONE_BUG, mStickyHeaderSize); Time actualTime = new Time(mTimeZone); actualTime.set(goToTime); CalendarController.getInstance(mContext).sendEvent(this, EventType.UPDATE_TITLE, actualTime, actualTime, -1, ViewType.CURRENT); } if (DEBUGLOG) { Log.e(TAG, "Setting listview to " + "findEventPositionNearestTime: " + (newPosition + OFF_BY_ONE_BUG)); } } // Make sure we change the selected instance Id only on a clean query and we // do not have one set already if (mSelectedInstanceId == -1 && newPosition != -1 && data.queryType == QUERY_TYPE_CLEAN) { if (data.id != -1 || data.goToTime != null) { mSelectedInstanceId = findInstanceIdFromPosition(newPosition); } } // size == 1 means a fresh query. Possibly after the data changed. // Let's check whether mSelectedInstanceId is still valid. 
if (mAdapterInfos.size() == 1 && mSelectedInstanceId != -1) { boolean found = false; cursor.moveToPosition(-1); while (cursor.moveToNext()) { if (mSelectedInstanceId == cursor .getLong(AgendaWindowAdapter.INDEX_INSTANCE_ID)) { found = true; break; } }; if (!found) { mSelectedInstanceId = -1; } } // Show the requested event if (mShowEventOnStart && data.queryType == QUERY_TYPE_CLEAN) { Cursor tempCursor = null; int tempCursorPosition = -1; // If no valid event is selected , just pick the first one if (mSelectedInstanceId == -1) { if (cursor.moveToFirst()) { mSelectedInstanceId = cursor .getLong(AgendaWindowAdapter.INDEX_INSTANCE_ID); // Set up a dummy view holder so we have the right all day // info when the view is created. // TODO determine the full set of what might be useful to // know about the selected view and fill it in. mSelectedVH = new AgendaAdapter.ViewHolder(); mSelectedVH.allDay = cursor.getInt(AgendaWindowAdapter.INDEX_ALL_DAY) != 0; tempCursor = cursor; } } else if (newPosition != -1) { tempCursor = getCursorByPosition(newPosition); tempCursorPosition = getCursorPositionByPosition(newPosition); } if (tempCursor != null) { EventInfo event = buildEventInfoFromCursor(tempCursor, tempCursorPosition, false); CalendarController.getInstance(mContext).sendEventRelatedEventWithExtra( this, EventType.VIEW_EVENT, event.id, event.begin, event.end, 0, 0, CalendarController.EventInfo.buildViewExtraLong( Attendees.ATTENDEE_STATUS_NONE, event.allDay), -1); } } } else { cursor.close(); } // Update header and footer if (!mDoneSettingUpHeaderFooter) { OnClickListener headerFooterOnClickListener = new OnClickListener() { public void onClick(View v) { if (v == mHeaderView) { queueQuery(new QuerySpec(QUERY_TYPE_OLDER)); } else { queueQuery(new QuerySpec(QUERY_TYPE_NEWER)); } }}; mHeaderView.setOnClickListener(headerFooterOnClickListener); mFooterView.setOnClickListener(headerFooterOnClickListener); mAgendaListView.addFooterView(mFooterView); mDoneSettingUpHeaderFooter = 
true; } synchronized (mQueryQueue) { int totalAgendaRangeStart = -1; int totalAgendaRangeEnd = -1; if (cursorSize != 0) { // Remove the query that just completed QuerySpec x = mQueryQueue.poll(); if (BASICLOG && !x.equals(data)) { Log.e(TAG, "onQueryComplete - cookie != head of queue"); } mEmptyCursorCount = 0; if (data.queryType == QUERY_TYPE_NEWER) { mNewerRequestsProcessed++; } else if (data.queryType == QUERY_TYPE_OLDER) { mOlderRequestsProcessed++; } totalAgendaRangeStart = mAdapterInfos.getFirst().start; totalAgendaRangeEnd = mAdapterInfos.getLast().end; } else { // CursorSize == 0 QuerySpec querySpec = mQueryQueue.peek(); // Update Adapter Info with new start and end date range if (!mAdapterInfos.isEmpty()) { DayAdapterInfo first = mAdapterInfos.getFirst(); DayAdapterInfo last = mAdapterInfos.getLast(); if (first.start - 1 <= querySpec.end && querySpec.start < first.start) { first.start = querySpec.start; } if (querySpec.start <= last.end + 1 && last.end < querySpec.end) { last.end = querySpec.end; } totalAgendaRangeStart = first.start; totalAgendaRangeEnd = last.end; } else { totalAgendaRangeStart = querySpec.start; totalAgendaRangeEnd = querySpec.end; } // Update query specification with expanded search range // and maybe rerun query switch (querySpec.queryType) { case QUERY_TYPE_OLDER: totalAgendaRangeStart = querySpec.start; querySpec.start -= MAX_QUERY_DURATION; break; case QUERY_TYPE_NEWER: totalAgendaRangeEnd = querySpec.end; querySpec.end += MAX_QUERY_DURATION; break; case QUERY_TYPE_CLEAN: totalAgendaRangeStart = querySpec.start; totalAgendaRangeEnd = querySpec.end; querySpec.start -= MAX_QUERY_DURATION / 2; querySpec.end += MAX_QUERY_DURATION / 2; break; } if (++mEmptyCursorCount > RETRIES_ON_NO_DATA) { // Nothing in the cursor again. 
Dropping query mQueryQueue.poll(); } } updateHeaderFooter(totalAgendaRangeStart, totalAgendaRangeEnd); // Go over the events and mark the first day after yesterday // that has events in it // If the range of adapters doesn't include yesterday, skip marking it since it will // mark the first day in the adapters. synchronized (mAdapterInfos) { DayAdapterInfo info = mAdapterInfos.getFirst(); Time time = new Time(mTimeZone); long now = System.currentTimeMillis(); time.set(now); int JulianToday = Time.getJulianDay(now, time.gmtoff); if (info != null && JulianToday >= info.start && JulianToday <= mAdapterInfos.getLast().end) { Iterator<DayAdapterInfo> iter = mAdapterInfos.iterator(); boolean foundDay = false; while (iter.hasNext() && !foundDay) { info = iter.next(); for (int i = 0; i < info.size; i++) { if (info.dayAdapter.findJulianDayFromPosition(i) >= JulianToday) { info.dayAdapter.setAsFirstDayAfterYesterday(i); foundDay = true; break; } } } } } // Fire off the next query if any Iterator<QuerySpec> it = mQueryQueue.iterator(); while (it.hasNext()) { QuerySpec queryData = it.next(); if (queryData.queryType == QUERY_TYPE_CLEAN || !isInRange(queryData.start, queryData.end)) { // Query accepted if (DEBUGLOG) Log.e(TAG, "Query accepted. QueueSize:" + mQueryQueue.size()); doQuery(queryData); break; } else { // Query rejected it.remove(); if (DEBUGLOG) Log.e(TAG, "Query rejected. QueueSize:" + mQueryQueue.size()); } } } if (BASICLOG) { for (DayAdapterInfo info3 : mAdapterInfos) { Log.e(TAG, "> " + info3.toString()); } } }
diff --git a/sip-servlets-jboss5/src/main/java/org/jboss/web/tomcat/service/injection/ConvergedSipResourceHandler.java b/sip-servlets-jboss5/src/main/java/org/jboss/web/tomcat/service/injection/ConvergedSipResourceHandler.java index 153a189b5..35cfb9fc3 100644 --- a/sip-servlets-jboss5/src/main/java/org/jboss/web/tomcat/service/injection/ConvergedSipResourceHandler.java +++ b/sip-servlets-jboss5/src/main/java/org/jboss/web/tomcat/service/injection/ConvergedSipResourceHandler.java @@ -1,509 +1,512 @@ /* * JBoss, Home of Professional Open Source * Copyright 2008, Red Hat Middleware LLC, and individual contributors * by the @authors tag. See the copyright.txt in the distribution for a * full listing of individual contributors. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. 
*/ package org.jboss.web.tomcat.service.injection; import java.lang.reflect.AccessibleObject; import java.lang.reflect.Field; import java.net.MalformedURLException; import java.net.URL; import java.util.Collection; import java.util.Map; import javax.annotation.Resource; import javax.ejb.EJBException; import javax.ejb.SessionContext; import javax.ejb.TimerService; import javax.servlet.sip.SipFactory; import javax.servlet.sip.SipSessionsUtil; import javax.transaction.UserTransaction; import javax.xml.ws.WebServiceContext; import org.jboss.ejb3.Container; import org.jboss.injection.EnvEntryEncInjector; import org.jboss.injection.InjectionContainer; import org.jboss.injection.InjectionUtil; import org.jboss.injection.Injector; import org.jboss.injection.InjectorFactory; import org.jboss.injection.JndiPropertyInjector; import org.jboss.injection.LinkRefEncInjector; import org.jboss.injection.SipFactoryPropertyInjector; import org.jboss.injection.SipSessionsUtilPropertyInjector; import org.jboss.injection.SipTimerServicePropertyInjector; import org.jboss.injection.TimerServicePropertyInjector; import org.jboss.injection.UserTransactionPropertyInjector; import org.jboss.injection.ValueEncInjector; import org.jboss.injection.lang.reflect.BeanProperty; import org.jboss.injection.lang.reflect.FieldBeanProperty; import org.jboss.logging.Logger; import org.jboss.metadata.javaee.spec.EnvironmentEntryMetaData; import org.jboss.metadata.javaee.spec.MessageDestinationReferenceMetaData; import org.jboss.metadata.javaee.spec.RemoteEnvironment; import org.jboss.metadata.javaee.spec.ResourceEnvironmentReferenceMetaData; import org.jboss.metadata.javaee.spec.ResourceReferenceMetaData; import org.jboss.reflect.plugins.ValueConvertor; import org.jboss.web.tomcat.service.TomcatConvergedSipInjectionContainer; import org.mobicents.servlet.sip.startup.SipContext; import org.omg.CORBA.ORB; /** * InjectionHandler for sip components. 
Allows to inject SipFactory, SipSessionsUtil and TimerService from @Resource annotation field * * @author [email protected] * */ public class ConvergedSipResourceHandler<X extends RemoteEnvironment> extends WebResourceHandler<X> { private static final Logger log = Logger.getLogger(ConvergedSipResourceHandler.class); private boolean checkEncInjectors; public ConvergedSipResourceHandler() { this(true); } public ConvergedSipResourceHandler(boolean checkEncInjectors) { this.checkEncInjectors = checkEncInjectors; } public void handleFieldAnnotations(Field field, InjectionContainer container, Map<AccessibleObject, Injector> injectors) { Resource ref = container.getAnnotation(Resource.class, field); if (ref == null) return; log.trace("field " + field + " has @Resource"); handlePropertyAnnotation(ref, new FieldBeanProperty(field), container, injectors); } private static void createURLInjector(String encName, String mappedName, InjectionContainer container) { assert encName.length() > 0 : "encName is empty"; assert mappedName.length() > 0 : "mappedName is empty"; // Create a URL from the mappedName try { URL url = new URL(mappedName.trim()); container.getEncInjectors().put(encName, new ValueEncInjector(encName, url, "@Resource")); } catch (MalformedURLException e) { throw new RuntimeException("failed to create url injector for: " + encName, e); } } private void handlePropertyAnnotation(Resource ref, BeanProperty property, InjectionContainer container, Map<AccessibleObject, Injector> injectors) { assert ref != null; assert property != null; assert container != null; assert injectors != null; String encName = ref.name(); if (encName == null || encName.equals("")) { //encName = InjectionUtil.getEncName(field); encName = property.getDeclaringClass().getName() + "/" + property.getName(); } if (!encName.startsWith("env/")) { encName = "env/" + encName; } AccessibleObject accObj = property.getAccessibleObject(); Class<?> type = property.getType(); if 
(!ref.type().equals(Object.class)) { type = ref.type(); } if (type.equals(UserTransaction.class)) { injectors.put(accObj, new UserTransactionPropertyInjector(property, container)); } else if (type.equals(TimerService.class)) { injectors.put(accObj, new TimerServicePropertyInjector(property, (Container) container)); // only EJBs } //Injects a Sip Factory else if (type.equals(SipFactory.class)) { injectors.put(accObj, new SipFactoryPropertyInjector(property, container)); } //Injects a SipSessionsUtil else if (type.equals(SipSessionsUtil.class)) { injectors.put(accObj, new SipSessionsUtilPropertyInjector(property, container)); } //Injects a Timer Service else if (type.equals(javax.servlet.sip.TimerService.class)) { injectors.put(accObj, new SipTimerServicePropertyInjector(property, container)); } else if(type.equals(URL.class) && ref.mappedName() != null && ref.mappedName().length() > 0) { createURLInjector(encName, ref.mappedName(), container); injectors.put(accObj, new JndiPropertyInjector(property, encName, container.getEnc())); } else if (type.equals(String.class) || type.equals(Character.class) || type.equals(Byte.class) || type.equals(Short.class) || type.equals(Integer.class) || type.equals(Long.class) || type.equals(Boolean.class) || type.equals(Double.class) || type.equals(Float.class) || type.isPrimitive() ) { // don't add an injector if no XML <env-entry is present as there will be no value to inject if (container.getEncInjectors().containsKey(encName)) { injectors.put(accObj, new JndiPropertyInjector(property, encName, container.getEnc())); } else if (ref.mappedName() != null && ref.mappedName().length() > 0) { // Use the mappedName as the string value String s = ref.mappedName().trim(); try { Object value = ValueConvertor.convertValue(type, s); container.getEncInjectors().put(encName, new ValueEncInjector(encName, value, "@Resource")); injectors.put(accObj, new JndiPropertyInjector(property, encName, container.getEnc())); } catch(Throwable t) { throw new 
RuntimeException("Failed to convert: "+ref.mappedName()+" to type:"+type, t); } } else { log.warn("Not injecting " + property.getName() + ", no matching enc injector " + encName + " found"); } } else { if (checkEncInjectors && !container.getEncInjectors().containsKey(encName)) { String mappedName = ref.mappedName(); if (mappedName == null || mappedName.equals("")) { // TODO: is this a nice trick? // if(ConnectionFactory.class.isAssignableFrom(type)) // { // // neat little trick // mappedName = "java:/ConnectionFactory"; // } // else if(ORB.class.isAssignableFrom(type)) mappedName = "java:comp/ORB"; else throw new RuntimeException("You did not specify a @Resource.mappedName() on " + accObj + " and there is no binding for enc name " + encName + " in XML"); } container.getEncInjectors().put(encName, new LinkRefEncInjector(encName, mappedName, "@Resource")); } injectors.put(accObj, new JndiPropertyInjector(property, encName, container.getEnc())); } } public void loadXml(X xml, InjectionContainer container) { if (xml == null) return; if (xml.getMessageDestinationReferences() != null) loadXmlMessageDestinationRefs(container, xml.getMessageDestinationReferences()); if (xml.getResourceEnvironmentReferences() != null) loadXmlResourceEnvRefs(container, xml.getResourceEnvironmentReferences()); if (xml.getResourceReferences() != null) loadXmlResourceRefs(container, xml.getResourceReferences()); if (xml.getEnvironmentEntries() != null) loadEnvEntry(container, xml.getEnvironmentEntries()); } private static void loadEnvEntry(InjectionContainer container, Collection<EnvironmentEntryMetaData> envEntries) { for (EnvironmentEntryMetaData envEntry : envEntries) { String encName = "env/" + envEntry.getEnvEntryName(); // 16.4.1.3: If the env-entry-value is not specified, no value will be injected and it // will not be initialized into the naming context. 
if (envEntry.getValue() == null) { log.debug("ignoring env-entry " + envEntry); continue; } InjectionUtil.injectionTarget(encName, envEntry, container, container.getEncInjections()); if (container.getEncInjectors().containsKey(encName)) continue; log.trace("adding env-entry injector " + encName); container.getEncInjectors().put(encName, new EnvEntryEncInjector(encName, envEntry.getType(), envEntry.getValue())); } } private static void loadXmlResourceRefs(InjectionContainer container, Collection<ResourceReferenceMetaData> refs) { for (ResourceReferenceMetaData envRef : refs) { String encName = "env/" + envRef.getResourceRefName(); if (container.getEncInjectors().containsKey(encName)) continue; String mappedName = envRef.getMappedName(); if(mappedName == null || mappedName.length() == 0) mappedName = envRef.getResolvedJndiName(); if (mappedName == null || mappedName.length() == 0) { if (envRef.getResUrl() != null) { try { container.getEncInjectors().put(encName, new ValueEncInjector(encName, new URL(envRef.getResUrl().trim()), "<resource-ref>")); } catch (MalformedURLException e) { throw new RuntimeException(e); } } else if (UserTransaction.class.getName().equals(envRef.getType())) { final InjectionContainer ic = container; InjectorFactory<?> factory = new InjectorFactory<UserTransactionPropertyInjector>() { public UserTransactionPropertyInjector create(BeanProperty property) { return new UserTransactionPropertyInjector(property, ic); } }; if(envRef.getInjectionTargets() != null) { TomcatInjectionUtils.createInjectors(container.getEncInjections(), container.getClassloader(), factory, envRef.getInjectionTargets()); continue; } else { encName = "java:comp/UserTransaction"; } } else if (ORB.class.getName().equals(envRef.getType())) { encName = "java:comp/ORB"; } else { throw new RuntimeException("mapped-name is required for " + envRef.getResourceRefName() + " of deployment " + container.getIdentifier()); } } else if(URL.class.getName().equals(envRef.getType()) && ! 
mappedName.startsWith("java:") ) { createURLInjector(encName, mappedName, container); InjectionUtil.injectionTarget(encName, envRef, container, container.getEncInjections()); continue; } else if(envRef.getResUrl() != null) { try { container.getEncInjectors().put(encName, new ValueEncInjector(encName, new URL(envRef.getResUrl().trim()), "<resource-ref>")); } catch (MalformedURLException e) { throw new RuntimeException(e); } } else { container.getEncInjectors().put(encName, new LinkRefEncInjector(encName, mappedName, "<resource-ref>")); } InjectionUtil.injectionTarget(encName, envRef, container, container.getEncInjections()); } } private static void loadXmlResourceEnvRefs(InjectionContainer container, Collection<ResourceEnvironmentReferenceMetaData> refs) { for (ResourceEnvironmentReferenceMetaData envRef : refs) { String resTypeName = envRef.getType(); String mappedName = envRef.getMappedName(); if(mappedName == null || mappedName.length() == 0) mappedName = envRef.getResolvedJndiName(); try { if(resTypeName != null) { Class<?> resType = Class.forName(resTypeName); if(TimerService.class.isAssignableFrom(resType)) { log.warn("Ignoring invalid TimerService resource-env-ref"); continue; } else if(SessionContext.class.isAssignableFrom(resType)) { log.warn("Ignoring invalid SessionContext resource-env-ref"); continue; } else if (resType.equals(UserTransaction.class)) { final InjectionContainer ic = container; InjectorFactory<?> factory = new InjectorFactory<UserTransactionPropertyInjector>() { public UserTransactionPropertyInjector create(BeanProperty property) { return new UserTransactionPropertyInjector(property, ic); } }; if(envRef.getInjectionTargets() != null) { TomcatInjectionUtils.createInjectors(container.getEncInjections(), container.getClassloader(), factory, envRef.getInjectionTargets()); continue; } else { mappedName = "java:comp/UserTransaction"; } } //Injects a Sip Factory else if (resType.equals(SipFactory.class)) { final InjectionContainer ic = container; 
InjectorFactory<?> factory = new InjectorFactory<SipFactoryPropertyInjector>() { public SipFactoryPropertyInjector create(BeanProperty property) { return new SipFactoryPropertyInjector(property, ic); } }; if(envRef.getInjectionTargets() != null) { TomcatInjectionUtils.createInjectors(container.getEncInjections(), container.getClassloader(), factory, envRef.getInjectionTargets()); continue; } else { mappedName = "java:comp/env/sip/"+ ((SipContext)((TomcatConvergedSipInjectionContainer)container).getCatalinaContext()).getApplicationName() +"/SipFactory"; } } //Injects a SipSessionsUtil else if (resType.equals(SipSessionsUtil.class)) { final InjectionContainer ic = container; InjectorFactory<?> factory = new InjectorFactory<SipSessionsUtilPropertyInjector>() { public SipSessionsUtilPropertyInjector create(BeanProperty property) { return new SipSessionsUtilPropertyInjector(property, ic); } }; if(envRef.getInjectionTargets() != null) { TomcatInjectionUtils.createInjectors(container.getEncInjections(), container.getClassloader(), factory, envRef.getInjectionTargets()); continue; } else { mappedName = "java:comp/env/sip/"+ ((SipContext)((TomcatConvergedSipInjectionContainer)container).getCatalinaContext()).getApplicationName() +"/SipSessionsUtil"; } } //Injects a Timer Service else if (resType.equals(javax.servlet.sip.TimerService.class)) { final InjectionContainer ic = container; InjectorFactory<?> factory = new InjectorFactory<SipTimerServicePropertyInjector>() { public SipTimerServicePropertyInjector create(BeanProperty property) { return new SipTimerServicePropertyInjector(property, ic); } }; if(envRef.getInjectionTargets() != null) { TomcatInjectionUtils.createInjectors(container.getEncInjections(), container.getClassloader(), factory, envRef.getInjectionTargets()); continue; } else { mappedName = "java:comp/env/sip/"+ ((SipContext)((TomcatConvergedSipInjectionContainer)container).getCatalinaContext()).getApplicationName() +"/TimerService"; } } else if 
(resType.equals(ORB.class)) { mappedName = "java:comp/ORB"; continue; } else if(WebServiceContext.class.getName().equals(envRef.getType())) { // JBAS-5359 InjectorFactory<?> factory = new InjectorFactory<WebServiceContextPropertyInjector>() { public WebServiceContextPropertyInjector create(BeanProperty property) { return new WebServiceContextPropertyInjector(property); } }; if(envRef.getInjectionTargets() != null) { TomcatInjectionUtils.createInjectors(container.getEncInjections(), container.getClassloader(), factory, envRef.getInjectionTargets()); continue; } } } } catch(ClassNotFoundException e) { - throw new EJBException(e); + if(log.isDebugEnabled()) { + log.debug("ClassNotFoundException while trying to inject Resource. We can ignore this error. Other handlers will signal if something is wrong." + resTypeName); + } + return; } String encName = "env/" + envRef.getResourceEnvRefName(); if (container.getEncInjectors().containsKey(encName)) continue; if (mappedName == null || mappedName.equals("")) { throw new RuntimeException("mapped-name is required for " + envRef.getResourceEnvRefName() + " of deployment " + container.getIdentifier()); } container.getEncInjectors().put(encName, new LinkRefEncInjector(encName, envRef.getMappedName(), "<resource-ref>")); InjectionUtil.injectionTarget(encName, envRef, container, container.getEncInjections()); } } private static void loadXmlMessageDestinationRefs(InjectionContainer container, Collection<MessageDestinationReferenceMetaData> refs) { for (MessageDestinationReferenceMetaData envRef : refs) { String encName = "env/" + envRef.getMessageDestinationRefName(); if (container.getEncInjectors().containsKey(encName)) continue; String jndiName = envRef.getMappedName(); if (jndiName == null || jndiName.equals("")) { jndiName = envRef.getResolvedJndiName(); if (jndiName == null || jndiName.equals("")) throw new RuntimeException("mapped-name is required for " + envRef.getMessageDestinationRefName() + " of deployment " + 
container.getIdentifier()); } container.getEncInjectors().put(encName, new LinkRefEncInjector(encName, jndiName, "<message-destination-ref>")); InjectionUtil.injectionTarget(encName, envRef, container, container.getEncInjections()); } } }
true
true
private static void loadXmlResourceEnvRefs(InjectionContainer container, Collection<ResourceEnvironmentReferenceMetaData> refs) { for (ResourceEnvironmentReferenceMetaData envRef : refs) { String resTypeName = envRef.getType(); String mappedName = envRef.getMappedName(); if(mappedName == null || mappedName.length() == 0) mappedName = envRef.getResolvedJndiName(); try { if(resTypeName != null) { Class<?> resType = Class.forName(resTypeName); if(TimerService.class.isAssignableFrom(resType)) { log.warn("Ignoring invalid TimerService resource-env-ref"); continue; } else if(SessionContext.class.isAssignableFrom(resType)) { log.warn("Ignoring invalid SessionContext resource-env-ref"); continue; } else if (resType.equals(UserTransaction.class)) { final InjectionContainer ic = container; InjectorFactory<?> factory = new InjectorFactory<UserTransactionPropertyInjector>() { public UserTransactionPropertyInjector create(BeanProperty property) { return new UserTransactionPropertyInjector(property, ic); } }; if(envRef.getInjectionTargets() != null) { TomcatInjectionUtils.createInjectors(container.getEncInjections(), container.getClassloader(), factory, envRef.getInjectionTargets()); continue; } else { mappedName = "java:comp/UserTransaction"; } } //Injects a Sip Factory else if (resType.equals(SipFactory.class)) { final InjectionContainer ic = container; InjectorFactory<?> factory = new InjectorFactory<SipFactoryPropertyInjector>() { public SipFactoryPropertyInjector create(BeanProperty property) { return new SipFactoryPropertyInjector(property, ic); } }; if(envRef.getInjectionTargets() != null) { TomcatInjectionUtils.createInjectors(container.getEncInjections(), container.getClassloader(), factory, envRef.getInjectionTargets()); continue; } else { mappedName = "java:comp/env/sip/"+ ((SipContext)((TomcatConvergedSipInjectionContainer)container).getCatalinaContext()).getApplicationName() +"/SipFactory"; } } //Injects a SipSessionsUtil else if 
(resType.equals(SipSessionsUtil.class)) { final InjectionContainer ic = container; InjectorFactory<?> factory = new InjectorFactory<SipSessionsUtilPropertyInjector>() { public SipSessionsUtilPropertyInjector create(BeanProperty property) { return new SipSessionsUtilPropertyInjector(property, ic); } }; if(envRef.getInjectionTargets() != null) { TomcatInjectionUtils.createInjectors(container.getEncInjections(), container.getClassloader(), factory, envRef.getInjectionTargets()); continue; } else { mappedName = "java:comp/env/sip/"+ ((SipContext)((TomcatConvergedSipInjectionContainer)container).getCatalinaContext()).getApplicationName() +"/SipSessionsUtil"; } } //Injects a Timer Service else if (resType.equals(javax.servlet.sip.TimerService.class)) { final InjectionContainer ic = container; InjectorFactory<?> factory = new InjectorFactory<SipTimerServicePropertyInjector>() { public SipTimerServicePropertyInjector create(BeanProperty property) { return new SipTimerServicePropertyInjector(property, ic); } }; if(envRef.getInjectionTargets() != null) { TomcatInjectionUtils.createInjectors(container.getEncInjections(), container.getClassloader(), factory, envRef.getInjectionTargets()); continue; } else { mappedName = "java:comp/env/sip/"+ ((SipContext)((TomcatConvergedSipInjectionContainer)container).getCatalinaContext()).getApplicationName() +"/TimerService"; } } else if (resType.equals(ORB.class)) { mappedName = "java:comp/ORB"; continue; } else if(WebServiceContext.class.getName().equals(envRef.getType())) { // JBAS-5359 InjectorFactory<?> factory = new InjectorFactory<WebServiceContextPropertyInjector>() { public WebServiceContextPropertyInjector create(BeanProperty property) { return new WebServiceContextPropertyInjector(property); } }; if(envRef.getInjectionTargets() != null) { TomcatInjectionUtils.createInjectors(container.getEncInjections(), container.getClassloader(), factory, envRef.getInjectionTargets()); continue; } } } } catch(ClassNotFoundException e) { throw 
new EJBException(e); } String encName = "env/" + envRef.getResourceEnvRefName(); if (container.getEncInjectors().containsKey(encName)) continue; if (mappedName == null || mappedName.equals("")) { throw new RuntimeException("mapped-name is required for " + envRef.getResourceEnvRefName() + " of deployment " + container.getIdentifier()); } container.getEncInjectors().put(encName, new LinkRefEncInjector(encName, envRef.getMappedName(), "<resource-ref>")); InjectionUtil.injectionTarget(encName, envRef, container, container.getEncInjections()); } }
private static void loadXmlResourceEnvRefs(InjectionContainer container, Collection<ResourceEnvironmentReferenceMetaData> refs) { for (ResourceEnvironmentReferenceMetaData envRef : refs) { String resTypeName = envRef.getType(); String mappedName = envRef.getMappedName(); if(mappedName == null || mappedName.length() == 0) mappedName = envRef.getResolvedJndiName(); try { if(resTypeName != null) { Class<?> resType = Class.forName(resTypeName); if(TimerService.class.isAssignableFrom(resType)) { log.warn("Ignoring invalid TimerService resource-env-ref"); continue; } else if(SessionContext.class.isAssignableFrom(resType)) { log.warn("Ignoring invalid SessionContext resource-env-ref"); continue; } else if (resType.equals(UserTransaction.class)) { final InjectionContainer ic = container; InjectorFactory<?> factory = new InjectorFactory<UserTransactionPropertyInjector>() { public UserTransactionPropertyInjector create(BeanProperty property) { return new UserTransactionPropertyInjector(property, ic); } }; if(envRef.getInjectionTargets() != null) { TomcatInjectionUtils.createInjectors(container.getEncInjections(), container.getClassloader(), factory, envRef.getInjectionTargets()); continue; } else { mappedName = "java:comp/UserTransaction"; } } //Injects a Sip Factory else if (resType.equals(SipFactory.class)) { final InjectionContainer ic = container; InjectorFactory<?> factory = new InjectorFactory<SipFactoryPropertyInjector>() { public SipFactoryPropertyInjector create(BeanProperty property) { return new SipFactoryPropertyInjector(property, ic); } }; if(envRef.getInjectionTargets() != null) { TomcatInjectionUtils.createInjectors(container.getEncInjections(), container.getClassloader(), factory, envRef.getInjectionTargets()); continue; } else { mappedName = "java:comp/env/sip/"+ ((SipContext)((TomcatConvergedSipInjectionContainer)container).getCatalinaContext()).getApplicationName() +"/SipFactory"; } } //Injects a SipSessionsUtil else if 
(resType.equals(SipSessionsUtil.class)) { final InjectionContainer ic = container; InjectorFactory<?> factory = new InjectorFactory<SipSessionsUtilPropertyInjector>() { public SipSessionsUtilPropertyInjector create(BeanProperty property) { return new SipSessionsUtilPropertyInjector(property, ic); } }; if(envRef.getInjectionTargets() != null) { TomcatInjectionUtils.createInjectors(container.getEncInjections(), container.getClassloader(), factory, envRef.getInjectionTargets()); continue; } else { mappedName = "java:comp/env/sip/"+ ((SipContext)((TomcatConvergedSipInjectionContainer)container).getCatalinaContext()).getApplicationName() +"/SipSessionsUtil"; } } //Injects a Timer Service else if (resType.equals(javax.servlet.sip.TimerService.class)) { final InjectionContainer ic = container; InjectorFactory<?> factory = new InjectorFactory<SipTimerServicePropertyInjector>() { public SipTimerServicePropertyInjector create(BeanProperty property) { return new SipTimerServicePropertyInjector(property, ic); } }; if(envRef.getInjectionTargets() != null) { TomcatInjectionUtils.createInjectors(container.getEncInjections(), container.getClassloader(), factory, envRef.getInjectionTargets()); continue; } else { mappedName = "java:comp/env/sip/"+ ((SipContext)((TomcatConvergedSipInjectionContainer)container).getCatalinaContext()).getApplicationName() +"/TimerService"; } } else if (resType.equals(ORB.class)) { mappedName = "java:comp/ORB"; continue; } else if(WebServiceContext.class.getName().equals(envRef.getType())) { // JBAS-5359 InjectorFactory<?> factory = new InjectorFactory<WebServiceContextPropertyInjector>() { public WebServiceContextPropertyInjector create(BeanProperty property) { return new WebServiceContextPropertyInjector(property); } }; if(envRef.getInjectionTargets() != null) { TomcatInjectionUtils.createInjectors(container.getEncInjections(), container.getClassloader(), factory, envRef.getInjectionTargets()); continue; } } } } catch(ClassNotFoundException e) { 
if(log.isDebugEnabled()) { log.debug("ClassNotFoundException while trying to inject Resource. We can ignore this error. Other handlers will signal if something is wrong." + resTypeName); } return; } String encName = "env/" + envRef.getResourceEnvRefName(); if (container.getEncInjectors().containsKey(encName)) continue; if (mappedName == null || mappedName.equals("")) { throw new RuntimeException("mapped-name is required for " + envRef.getResourceEnvRefName() + " of deployment " + container.getIdentifier()); } container.getEncInjectors().put(encName, new LinkRefEncInjector(encName, envRef.getMappedName(), "<resource-ref>")); InjectionUtil.injectionTarget(encName, envRef, container, container.getEncInjections()); } }
diff --git a/src/rendering/OpenGLRendering.java b/src/rendering/OpenGLRendering.java index c1bed3f..ee15519 100644 --- a/src/rendering/OpenGLRendering.java +++ b/src/rendering/OpenGLRendering.java @@ -1,138 +1,138 @@ package rendering; import game.Game; import input.CanvasListener; import java.awt.AWTException; import java.awt.Robot; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import javax.media.nativewindow.util.Point; import javax.media.opengl.GLCapabilities; import javax.media.opengl.GLEventListener; import javax.media.opengl.GLProfile; import settings.Settings; import util.Log; import com.jogamp.newt.event.WindowAdapter; import com.jogamp.newt.event.WindowEvent; import com.jogamp.newt.opengl.GLWindow; public class OpenGLRendering { // private GLCanvas canvas; private static GLWindow window; private static Robot robot; private static Point screenLocation = new Point(); private static boolean fullscreen; static { try { robot = new Robot(); } catch (AWTException e) { e.printStackTrace(); } GLProfile.initSingleton(); } private boolean disposed; public OpenGLRendering(final GLEventListener r) { GLProfile glp = GLProfile.getDefault(); logAvailableImplementations(glp); GLCapabilities caps = new GLCapabilities(glp); caps.setDoubleBuffered(true); /* * frame = Util.createFrame(); canvas = new GLCanvas(caps); * canvas.setAutoSwapBufferMode(true); canvas.addGLEventListener(r); * Container pane = frame.getContentPane(); pane.setLayout(new * BoxLayout(pane, BoxLayout.X_AXIS)); canvas.setMaximumSize(new * Dimension( Settings.STEREO ? Settings.WIDTH * 2 : Settings.WIDTH, * Settings.HEIGHT)); canvas.setMinimumSize(new Dimension( * Settings.STEREO ? 
Settings.WIDTH * 2 : Settings.WIDTH, * Settings.HEIGHT)); // canvas.setPreferredSize(new Dimension(0, 0)); * pane.add(canvas); CanvasListener l = new CanvasListener(); * canvas.addMouseMotionListener(l); canvas.addMouseListener(l); * canvas.addMouseWheelListener(l); canvas.addKeyListener(l); Util.c = * canvas; frame.setVisible(true); */ window = GLWindow.create(caps); window.setSize(Settings.STEREO ? Settings.WIDTH * 2 : Settings.WIDTH, Settings.HEIGHT); window.addGLEventListener(r); setFullscreen(Settings.USE_FULL_SCREEN); window.setAlwaysOnTop(true); window.setAutoSwapBufferMode(true); window.setVisible(true); - window.setPosition(window.getScreen().getWidth() - window.getWidth(), 0); + window.setPosition(window.getScreen().getWidth() - window.getWidth(), 50); window.setTitle("fungine"); CanvasListener c = new CanvasListener(); window.addMouseListener(c); window.addKeyListener(c); window.addWindowListener(new WindowAdapter() { public void windowDestroyNotify(WindowEvent arg0) { Game.INSTANCE.exitFlag = true; }; }); } private void logAvailableImplementations(GLProfile glp) { StringBuilder sb = new StringBuilder( "available opengl implementations: "); for (Method m : glp.getClass().getMethods()) { if (m.getName().startsWith("isGL")) { try { if ((Boolean) m.invoke(glp)) { sb.append(m.getName().substring(2)); sb.append(", "); } } catch (IllegalAccessException e) { e.printStackTrace(); } catch (IllegalArgumentException e) { e.printStackTrace(); } catch (InvocationTargetException e) { e.printStackTrace(); } } } Log.log(this, sb.toString()); } public void dispose() { Log.log(this, "disposing"); disposed = true; GLProfile.shutdown(); Log.log(this, "disposed"); window.destroy(); } public void display() { if (!disposed) window.display(); } public static void hideMouse(boolean b) { window.setPointerVisible(!b); } public static void centerMouse() { screenLocation.setX(0); screenLocation.setY(0); screenLocation = window.getLocationOnScreen(screenLocation); 
robot.mouseMove(screenLocation.getX() + window.getWidth() / (Settings.STEREO ? 4 : 2), screenLocation.getY() + window.getHeight() / 2); } public static boolean isFullscreen() { return fullscreen; } public static void setFullscreen(boolean fullscreenFlag) { fullscreen = fullscreenFlag; window.setFullscreen(fullscreen); } }
true
true
public OpenGLRendering(final GLEventListener r) { GLProfile glp = GLProfile.getDefault(); logAvailableImplementations(glp); GLCapabilities caps = new GLCapabilities(glp); caps.setDoubleBuffered(true); /* * frame = Util.createFrame(); canvas = new GLCanvas(caps); * canvas.setAutoSwapBufferMode(true); canvas.addGLEventListener(r); * Container pane = frame.getContentPane(); pane.setLayout(new * BoxLayout(pane, BoxLayout.X_AXIS)); canvas.setMaximumSize(new * Dimension( Settings.STEREO ? Settings.WIDTH * 2 : Settings.WIDTH, * Settings.HEIGHT)); canvas.setMinimumSize(new Dimension( * Settings.STEREO ? Settings.WIDTH * 2 : Settings.WIDTH, * Settings.HEIGHT)); // canvas.setPreferredSize(new Dimension(0, 0)); * pane.add(canvas); CanvasListener l = new CanvasListener(); * canvas.addMouseMotionListener(l); canvas.addMouseListener(l); * canvas.addMouseWheelListener(l); canvas.addKeyListener(l); Util.c = * canvas; frame.setVisible(true); */ window = GLWindow.create(caps); window.setSize(Settings.STEREO ? Settings.WIDTH * 2 : Settings.WIDTH, Settings.HEIGHT); window.addGLEventListener(r); setFullscreen(Settings.USE_FULL_SCREEN); window.setAlwaysOnTop(true); window.setAutoSwapBufferMode(true); window.setVisible(true); window.setPosition(window.getScreen().getWidth() - window.getWidth(), 0); window.setTitle("fungine"); CanvasListener c = new CanvasListener(); window.addMouseListener(c); window.addKeyListener(c); window.addWindowListener(new WindowAdapter() { public void windowDestroyNotify(WindowEvent arg0) { Game.INSTANCE.exitFlag = true; }; }); }
public OpenGLRendering(final GLEventListener r) { GLProfile glp = GLProfile.getDefault(); logAvailableImplementations(glp); GLCapabilities caps = new GLCapabilities(glp); caps.setDoubleBuffered(true); /* * frame = Util.createFrame(); canvas = new GLCanvas(caps); * canvas.setAutoSwapBufferMode(true); canvas.addGLEventListener(r); * Container pane = frame.getContentPane(); pane.setLayout(new * BoxLayout(pane, BoxLayout.X_AXIS)); canvas.setMaximumSize(new * Dimension( Settings.STEREO ? Settings.WIDTH * 2 : Settings.WIDTH, * Settings.HEIGHT)); canvas.setMinimumSize(new Dimension( * Settings.STEREO ? Settings.WIDTH * 2 : Settings.WIDTH, * Settings.HEIGHT)); // canvas.setPreferredSize(new Dimension(0, 0)); * pane.add(canvas); CanvasListener l = new CanvasListener(); * canvas.addMouseMotionListener(l); canvas.addMouseListener(l); * canvas.addMouseWheelListener(l); canvas.addKeyListener(l); Util.c = * canvas; frame.setVisible(true); */ window = GLWindow.create(caps); window.setSize(Settings.STEREO ? Settings.WIDTH * 2 : Settings.WIDTH, Settings.HEIGHT); window.addGLEventListener(r); setFullscreen(Settings.USE_FULL_SCREEN); window.setAlwaysOnTop(true); window.setAutoSwapBufferMode(true); window.setVisible(true); window.setPosition(window.getScreen().getWidth() - window.getWidth(), 50); window.setTitle("fungine"); CanvasListener c = new CanvasListener(); window.addMouseListener(c); window.addKeyListener(c); window.addWindowListener(new WindowAdapter() { public void windowDestroyNotify(WindowEvent arg0) { Game.INSTANCE.exitFlag = true; }; }); }
diff --git a/portal-core/src/main/java/org/devproof/portal/core/module/user/panel/LoginBoxPanel.java b/portal-core/src/main/java/org/devproof/portal/core/module/user/panel/LoginBoxPanel.java index 837f9636..e91345aa 100644 --- a/portal-core/src/main/java/org/devproof/portal/core/module/user/panel/LoginBoxPanel.java +++ b/portal-core/src/main/java/org/devproof/portal/core/module/user/panel/LoginBoxPanel.java @@ -1,147 +1,151 @@ /* * Copyright 2009-2010 Carsten Hufe devproof.org * * Licensed under the Apache License, Version 2.0 (the "License") * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.devproof.portal.core.module.user.panel; import org.apache.wicket.Page; import org.apache.wicket.PageParameters; import org.apache.wicket.markup.html.WebMarkupContainer; import org.apache.wicket.markup.html.form.Form; import org.apache.wicket.markup.html.form.HiddenField; import org.apache.wicket.markup.html.form.PasswordTextField; import org.apache.wicket.markup.html.form.RequiredTextField; import org.apache.wicket.markup.html.form.TextField; import org.apache.wicket.markup.html.link.BookmarkablePageLink; import org.apache.wicket.markup.html.pages.RedirectPage; import org.apache.wicket.markup.html.panel.Panel; import org.apache.wicket.model.CompoundPropertyModel; import org.apache.wicket.util.value.ValueMap; import org.devproof.portal.core.app.PortalApplication; import org.devproof.portal.core.app.PortalSession; import org.devproof.portal.core.module.box.panel.BoxTitleVisibility; import org.devproof.portal.core.module.common.page.MessagePage; import org.devproof.portal.core.module.user.exception.UserNotConfirmedException; import org.devproof.portal.core.module.user.page.ForgotPasswordPage; import org.devproof.portal.core.module.user.page.ReenterEmailPage; import org.devproof.portal.core.module.user.page.RegisterPage; /** * @author Carsten Hufe */ public class LoginBoxPanel extends Panel implements BoxTitleVisibility { private static final long serialVersionUID = 1L; private PageParameters params; private ValueMap valueMap; private WebMarkupContainer titleContainer; public LoginBoxPanel(String id, PageParameters params) { super(id); this.params = params; setValueMap(); add(createTitleContainer()); add(createLoginForm()); add(createRegisterLink()); add(createForgotPasswordLink()); } private Form<ValueMap> createLoginForm() { Form<ValueMap> form = newLoginForm(); form.add(createUsernameField()); form.add(createPasswordField()); form.add(createOptParamHiddenField()); return form; } private Form<ValueMap> newLoginForm() { return new 
Form<ValueMap>("loginForm", new CompoundPropertyModel<ValueMap>(valueMap)) { private static final long serialVersionUID = 1L; @Override protected void onSubmit() { String username = valueMap.getString("username"); String password = valueMap.getString("password"); PortalSession session = (PortalSession) getSession(); try { String message = session.authenticate(username, password); if (message == null) { info(getString("logged.in")); redirectToSamePage(); } else { error(getString(message)); } } catch (UserNotConfirmedException e) { setResponsePage(new ReenterEmailPage(valueMap.getString("username"))); } } private void redirectToSamePage() { // redirect to the same page so that the rights will be // rechecked! String optParam = valueMap.getString("optparam"); if (getPage() instanceof MessagePage) { MessagePage msgPage = (MessagePage) getPage(); String redirectUrl = msgPage.getRedirectURLAfterLogin(); if (redirectUrl != null) { setResponsePage(new RedirectPage(redirectUrl)); } else { @SuppressWarnings("unchecked") Class<? 
extends Page> homePage = ((PortalApplication) getApplication()).getHomePage(); setResponsePage(homePage); } } else { - setResponsePage(getPage().getClass(), new PageParameters("0=" + optParam)); + if (optParam == null) { + setResponsePage(getPage().getClass()); + } else { + setResponsePage(getPage().getClass(), new PageParameters("0=" + optParam)); + } } } }; } private HiddenField<String> createOptParamHiddenField() { // View for ArticleViewPage and OtherPageViewPage return new HiddenField<String>("optparam"); } private PasswordTextField createPasswordField() { return new PasswordTextField("password"); } private TextField<String> createUsernameField() { return new RequiredTextField<String>("username"); } private BookmarkablePageLink<Void> createForgotPasswordLink() { return new BookmarkablePageLink<Void>("forgotPasswordLink", ForgotPasswordPage.class); } private BookmarkablePageLink<Void> createRegisterLink() { return new BookmarkablePageLink<Void>("registerLink", RegisterPage.class); } private void setValueMap() { valueMap = new ValueMap(); valueMap.add("optparam", params.getString("0")); } private WebMarkupContainer createTitleContainer() { titleContainer = new WebMarkupContainer("title"); return titleContainer; } @Override public void setTitleVisible(boolean visible) { titleContainer.setVisible(visible); } }
true
true
private Form<ValueMap> newLoginForm() { return new Form<ValueMap>("loginForm", new CompoundPropertyModel<ValueMap>(valueMap)) { private static final long serialVersionUID = 1L; @Override protected void onSubmit() { String username = valueMap.getString("username"); String password = valueMap.getString("password"); PortalSession session = (PortalSession) getSession(); try { String message = session.authenticate(username, password); if (message == null) { info(getString("logged.in")); redirectToSamePage(); } else { error(getString(message)); } } catch (UserNotConfirmedException e) { setResponsePage(new ReenterEmailPage(valueMap.getString("username"))); } } private void redirectToSamePage() { // redirect to the same page so that the rights will be // rechecked! String optParam = valueMap.getString("optparam"); if (getPage() instanceof MessagePage) { MessagePage msgPage = (MessagePage) getPage(); String redirectUrl = msgPage.getRedirectURLAfterLogin(); if (redirectUrl != null) { setResponsePage(new RedirectPage(redirectUrl)); } else { @SuppressWarnings("unchecked") Class<? extends Page> homePage = ((PortalApplication) getApplication()).getHomePage(); setResponsePage(homePage); } } else { setResponsePage(getPage().getClass(), new PageParameters("0=" + optParam)); } } }; }
private Form<ValueMap> newLoginForm() { return new Form<ValueMap>("loginForm", new CompoundPropertyModel<ValueMap>(valueMap)) { private static final long serialVersionUID = 1L; @Override protected void onSubmit() { String username = valueMap.getString("username"); String password = valueMap.getString("password"); PortalSession session = (PortalSession) getSession(); try { String message = session.authenticate(username, password); if (message == null) { info(getString("logged.in")); redirectToSamePage(); } else { error(getString(message)); } } catch (UserNotConfirmedException e) { setResponsePage(new ReenterEmailPage(valueMap.getString("username"))); } } private void redirectToSamePage() { // redirect to the same page so that the rights will be // rechecked! String optParam = valueMap.getString("optparam"); if (getPage() instanceof MessagePage) { MessagePage msgPage = (MessagePage) getPage(); String redirectUrl = msgPage.getRedirectURLAfterLogin(); if (redirectUrl != null) { setResponsePage(new RedirectPage(redirectUrl)); } else { @SuppressWarnings("unchecked") Class<? extends Page> homePage = ((PortalApplication) getApplication()).getHomePage(); setResponsePage(homePage); } } else { if (optParam == null) { setResponsePage(getPage().getClass()); } else { setResponsePage(getPage().getClass(), new PageParameters("0=" + optParam)); } } } }; }
diff --git a/src/win32/classes/java/net/WinCEDatagramSocketImpl.java b/src/win32/classes/java/net/WinCEDatagramSocketImpl.java index 97e42234..9fcf6b98 100644 --- a/src/win32/classes/java/net/WinCEDatagramSocketImpl.java +++ b/src/win32/classes/java/net/WinCEDatagramSocketImpl.java @@ -1,93 +1,94 @@ /* * @(#)WinCEDatagramSocketImpl.java 1.6 06/10/10 * * Copyright 1990-2006 Sun Microsystems, Inc. All Rights Reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License version * 2 only, as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * General Public License version 2 for more details (a copy is * included at /legal/license.txt). * * You should have received a copy of the GNU General Public License * version 2 along with this work; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA * * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa * Clara, CA 95054 or visit www.sun.com if you need additional * information or have any questions. 
* */ package java.net; import java.io.FileDescriptor; import java.io.IOException; import java.io.InterruptedIOException; class WinCEDatagramSocketImpl extends PlainDatagramSocketImpl { private DatagramPacket peekPacket = null; private int bufLength = 0; protected synchronized void create() throws SocketException { super.create(); bufLength = ((Integer)getOption(SO_RCVBUF)).intValue(); } protected synchronized int peek(InetAddress i) throws IOException { if (peekPacket == null) { int len = bufLength; DatagramPacket p = new DatagramPacket(new byte[len], len); receive(p); peekPacket = p; } i.address = peekPacket.getAddress().address; i.family = peekPacket.getAddress().family; return peekPacket.getPort(); } protected synchronized int peekData(DatagramPacket pd) throws IOException { if (peekPacket == null) { int len = bufLength; DatagramPacket p = new DatagramPacket(new byte[len], len); receive(p); peekPacket = p; } int peeklen = Math.min(pd.getLength(), peekPacket.getLength()); System.arraycopy(peekPacket.getData(), peekPacket.getOffset(), pd.getData(), pd.getOffset(), peeklen); pd.setLength(peeklen); pd.setAddress(peekPacket.getAddress()); + pd.setPort(peekPacket.getPort()); return peekPacket.getPort(); } protected synchronized void receive(DatagramPacket p) throws IOException { if (peekPacket == null) { super.receive(p); } else { p.setPort(peekPacket.getPort()); p.setAddress(peekPacket.getAddress()); int len = Math.min(peekPacket.getLength(), p.getLength()); System.arraycopy(peekPacket.getData(), peekPacket.getOffset(), p.getData(), p.getOffset(), len); p.setLength(len); peekPacket = null; } } public void setOption(int optID, Object o) throws SocketException { super.setOption(optID, o); if (optID == SO_RCVBUF) { bufLength = ((Integer)getOption(SO_RCVBUF)).intValue(); } } }
true
true
protected synchronized int peekData(DatagramPacket pd) throws IOException { if (peekPacket == null) { int len = bufLength; DatagramPacket p = new DatagramPacket(new byte[len], len); receive(p); peekPacket = p; } int peeklen = Math.min(pd.getLength(), peekPacket.getLength()); System.arraycopy(peekPacket.getData(), peekPacket.getOffset(), pd.getData(), pd.getOffset(), peeklen); pd.setLength(peeklen); pd.setAddress(peekPacket.getAddress()); return peekPacket.getPort(); }
protected synchronized int peekData(DatagramPacket pd) throws IOException { if (peekPacket == null) { int len = bufLength; DatagramPacket p = new DatagramPacket(new byte[len], len); receive(p); peekPacket = p; } int peeklen = Math.min(pd.getLength(), peekPacket.getLength()); System.arraycopy(peekPacket.getData(), peekPacket.getOffset(), pd.getData(), pd.getOffset(), peeklen); pd.setLength(peeklen); pd.setAddress(peekPacket.getAddress()); pd.setPort(peekPacket.getPort()); return peekPacket.getPort(); }
diff --git a/src/share/src/main/java/cz/filmtit/share/parsing/Parser.java b/src/share/src/main/java/cz/filmtit/share/parsing/Parser.java index 682fca63..96c0b2b0 100644 --- a/src/share/src/main/java/cz/filmtit/share/parsing/Parser.java +++ b/src/share/src/main/java/cz/filmtit/share/parsing/Parser.java @@ -1,115 +1,115 @@ package cz.filmtit.share.parsing; import java.util.LinkedList; import java.util.ArrayList; import java.util.List; import cz.filmtit.share.TimedChunk; import cz.filmtit.share.TitChunkSeparator; import cz.filmtit.share.Language; import cz.filmtit.share.annotations.*; import com.google.gwt.regexp.shared.RegExp; import com.google.gwt.regexp.shared.MatchResult; import com.google.gwt.regexp.shared.SplitResult; /** * Interface for parsing a subtitle file, * intended primarily as a base class * for ParserSrt (parsing .srt files) * and ParserSub (.sub files) * * @author Honza Václ * */ public abstract class Parser { public static final String SUBLINE_SEPARATOR_IN = "\\|"; public static final String SUBLINE_SEPARATOR_OUT = " | "; public static final String EMPTY_STRING = ""; public static final String LINE_SEPARATOR = "\r?\n"; public static final String SUBLINE_SEPARATOR_OUT_REGEXP = "( |^)\\|( |$)"; public static final RegExp dialogueMatch = RegExp.compile("^ ?- ?"); //TODO - better solution //(will need to rewrite the AnnotationType from scratch I am afraid) //temporary solution - ignore all HTML-like tags public static final RegExp formatMatch = RegExp.compile("<[^>]*>", "g"); public abstract List<UnprocessedChunk> parseUnprocessed(String text); public List<TimedChunk> parse(String text, long documentId, Language l) { return processChunks(parseUnprocessed(text), documentId, l); } public static List<TimedChunk> processChunks(List<UnprocessedChunk> chunks, long documentId, Language l) { LinkedList<TimedChunk> result = new LinkedList<TimedChunk>(); int chunkId = 0; for (UnprocessedChunk chunk: chunks){ result.addAll(processChunk(chunk, chunkId, documentId, 
l)); chunkId++; } renumber(result); return result; } public static void renumber (List<TimedChunk> what) { int i = 0; for (TimedChunk chunk:what) { chunk.setIndex(i); i++; } } public static LinkedList<TimedChunk> processChunk(UnprocessedChunk chunk, int chunkId, long documentId, Language l) { LinkedList<TimedChunk> result = new LinkedList<TimedChunk>(); //separate into sentences List<String> separatedText = TitChunkSeparator.separate(chunk.getText(), l); int partNumber = 1; for (String chunkText : separatedText) { chunkText = formatMatch.replace(chunkText, ""); List<Annotation> annotations = new ArrayList<Annotation>(); //if it is a dialogue, mark it as such in annotations if (dialogueMatch.test(chunkText)) { chunkText = dialogueMatch.replace(chunkText, ""); annotations.add(new Annotation(AnnotationType.DIALOGUE, 0, 0)); } //add linebreaks as annotations RegExp sublineRegexp = RegExp.compile(SUBLINE_SEPARATOR_OUT_REGEXP, "g"); MatchResult sublineResult = sublineRegexp.exec(chunkText); while (sublineResult != null) { int index = sublineResult.getIndex(); //not sure about off-by-one errors String newChunkText = chunkText.substring(0, index); if (index+3 < chunkText.length()) { - newChunkText = newChunkText + " "+chunkText.substring(index+3, chunkText.length()); + newChunkText = newChunkText + " "+chunkText.substring(index+2, chunkText.length()); } chunkText = newChunkText; if (index != 0) { annotations.add(new Annotation(AnnotationType.LINEBREAK, index, index)); } sublineResult = sublineRegexp.exec(chunkText); } //create a new timedchunk TimedChunk newChunk = new TimedChunk(chunk.getStartTime(), chunk.getEndTime(), partNumber, chunkText, chunkId, documentId); newChunk.addAnnotations(annotations); result.add( newChunk); partNumber++; } return result; } }
true
true
public static LinkedList<TimedChunk> processChunk(UnprocessedChunk chunk, int chunkId, long documentId, Language l) { LinkedList<TimedChunk> result = new LinkedList<TimedChunk>(); //separate into sentences List<String> separatedText = TitChunkSeparator.separate(chunk.getText(), l); int partNumber = 1; for (String chunkText : separatedText) { chunkText = formatMatch.replace(chunkText, ""); List<Annotation> annotations = new ArrayList<Annotation>(); //if it is a dialogue, mark it as such in annotations if (dialogueMatch.test(chunkText)) { chunkText = dialogueMatch.replace(chunkText, ""); annotations.add(new Annotation(AnnotationType.DIALOGUE, 0, 0)); } //add linebreaks as annotations RegExp sublineRegexp = RegExp.compile(SUBLINE_SEPARATOR_OUT_REGEXP, "g"); MatchResult sublineResult = sublineRegexp.exec(chunkText); while (sublineResult != null) { int index = sublineResult.getIndex(); //not sure about off-by-one errors String newChunkText = chunkText.substring(0, index); if (index+3 < chunkText.length()) { newChunkText = newChunkText + " "+chunkText.substring(index+3, chunkText.length()); } chunkText = newChunkText; if (index != 0) { annotations.add(new Annotation(AnnotationType.LINEBREAK, index, index)); } sublineResult = sublineRegexp.exec(chunkText); } //create a new timedchunk TimedChunk newChunk = new TimedChunk(chunk.getStartTime(), chunk.getEndTime(), partNumber, chunkText, chunkId, documentId); newChunk.addAnnotations(annotations); result.add( newChunk); partNumber++; } return result; }
public static LinkedList<TimedChunk> processChunk(UnprocessedChunk chunk, int chunkId, long documentId, Language l) { LinkedList<TimedChunk> result = new LinkedList<TimedChunk>(); //separate into sentences List<String> separatedText = TitChunkSeparator.separate(chunk.getText(), l); int partNumber = 1; for (String chunkText : separatedText) { chunkText = formatMatch.replace(chunkText, ""); List<Annotation> annotations = new ArrayList<Annotation>(); //if it is a dialogue, mark it as such in annotations if (dialogueMatch.test(chunkText)) { chunkText = dialogueMatch.replace(chunkText, ""); annotations.add(new Annotation(AnnotationType.DIALOGUE, 0, 0)); } //add linebreaks as annotations RegExp sublineRegexp = RegExp.compile(SUBLINE_SEPARATOR_OUT_REGEXP, "g"); MatchResult sublineResult = sublineRegexp.exec(chunkText); while (sublineResult != null) { int index = sublineResult.getIndex(); //not sure about off-by-one errors String newChunkText = chunkText.substring(0, index); if (index+3 < chunkText.length()) { newChunkText = newChunkText + " "+chunkText.substring(index+2, chunkText.length()); } chunkText = newChunkText; if (index != 0) { annotations.add(new Annotation(AnnotationType.LINEBREAK, index, index)); } sublineResult = sublineRegexp.exec(chunkText); } //create a new timedchunk TimedChunk newChunk = new TimedChunk(chunk.getStartTime(), chunk.getEndTime(), partNumber, chunkText, chunkId, documentId); newChunk.addAnnotations(annotations); result.add( newChunk); partNumber++; } return result; }
diff --git a/CASSP/src/cassp/ea/SSPEA.java b/CASSP/src/cassp/ea/SSPEA.java index 10a74aa..1cc27f0 100644 --- a/CASSP/src/cassp/ea/SSPEA.java +++ b/CASSP/src/cassp/ea/SSPEA.java @@ -1,146 +1,147 @@ /** * SSPEA.java * * Copyright (c) 2013 Vladimir Brigant * This software is distributed under the terms of the GNU General Public License. */ package cassp.ea; import java.io.*; import java.util.*; import org.jgap.*; import org.jgap.data.*; import org.jgap.impl.*; import org.jgap.xml.*; import org.jgap.event.*; import org.jgap.util.*; import org.w3c.dom.*; import org.apache.log4j.*; import cassp.*; import cassp.ca.*; import cassp.data.*; import cassp.utils.*; import cassp.config.*; import cassp.ca.rules.*; import cassp.ea.stats.*; /* Notes: - default mutation rate - 1/12 (1/X) - default crossover rate - 35 % from pop size - http://vyuka.martinpilat.com/2011/11/09/eva-5-cviceni-realna-funkce-ii-operatory/ */ /** * Secondary Structure Prediction Evolutionary Algorithm. */ public class SSPEA { static Logger logger = Logger.getLogger(SSPEA.class); private Data data; private SimConfig config; private EAStats stats; public SSPEA(SimConfig config, Data data){ this.data = data; this.config = config; this.stats = new EAStats(config.getNoChangeEAEnd()); } public CARule evolve() throws Exception{ // EA configuration Configuration.reset(); Configuration conf = new Configuration("conf"); conf.setBreeder(new GABreeder()); RandomGenerator rg = new GaussianRndGenerator(this.config.getMutDev()); conf.setRandomGenerator(rg); conf.setEventManager(new EventManager()); BestChromosomesSelector bestChromsSelector = new BestChromosomesSelector(conf, 0.90d); bestChromsSelector.setDoubletteChromosomesAllowed(true); conf.addNaturalSelector(bestChromsSelector, false); conf.addNaturalSelector(new WeightedRouletteSelector(conf), false); conf.setMinimumPopSizePercent(0); conf.setSelectFromPrevGen(1.0d); conf.setKeepPopulationSizeConstant(true); conf.setFitnessEvaluator(new DefaultFitnessEvaluator()); 
conf.setChromosomePool(new ChromosomePool()); conf.addGeneticOperator(new CrossoverOperator(conf, this.config.getCrossProb())); conf.addGeneticOperator(new GaussianMutationOperator(conf, this.config.getMutDev())); conf.setPreservFittestIndividual(true); FitnessFunction ff = new SSPFF(this.data, this.config); conf.setFitnessFunction(ff); CARule rule = this.setRule(); //conf.addGeneticOperator(new MutationOperator(conf, rule.getSize()/2)); IChromosome sampleChromosome = rule.initChromosome(conf, this.config.getMaxSteps()); conf.setSampleChromosome(sampleChromosome); conf.setPopulationSize(this.config.getPop()); //genotype = Genotype.randomInitialGenotype(conf); Genotype genotype = this.initGenotype(conf); // population evolving for (int i = 0; i < this.config.getMaxGen(); i++) { - logger.info("### " + i + ". generation"); genotype.evolve(); // filling GenStats object GenStats gs = new GenStats(); gs.setMax(Utils.getMax(genotype.getPopulation())); - logger.info(Utils.getMax(genotype.getPopulation())); gs.setMin(Utils.getMin(genotype.getPopulation())); gs.setMean(Utils.getMean(genotype.getPopulation())); gs.setGeneration(i); this.stats.addGenStats(gs); + logger.info("## " + i + ". generation: " + genotype.getFittestChromosome().getFitnessValue()); if (this.stats.isConverged()) break; } - return rule.fromChromosome(genotype.getFittestChromosome()); + CARule bestRule = rule.fromChromosome(genotype.getFittestChromosome()); + bestRule.computeMaxPropsDiff(new double[]{this.data.getMaxCF(), this.data.getMaxCC()}); + return rule; } /** * Initialize EA genotype. 
*/ private Genotype initGenotype(Configuration conf){ Genotype genotype = null; if (this.config.getRule() == SimConfig.RULE_SIMPLE) genotype = CASimpleRule.initGenotypeGauss(conf); else if (this.config.getRule() == SimConfig.RULE_CONFORM) genotype = CAConformRule.initGenotypeGauss(conf); return genotype; } /* Getters & setters */ private CARule setRule(){ if (this.config.getRule() == SimConfig.RULE_SIMPLE) return new CASimpleRule(this.config.getNeigh(), this.data.getAminoAcids()); else if (this.config.getRule() == SimConfig.RULE_CONFORM) return new CAConformRule(this.config.getNeigh(), this.data.getAminoAcids()); else return new CASimpleRule(this.config.getNeigh(), this.data.getAminoAcids()); } public EAStats getStats(){ return this.stats; } }
false
true
public CARule evolve() throws Exception{ // EA configuration Configuration.reset(); Configuration conf = new Configuration("conf"); conf.setBreeder(new GABreeder()); RandomGenerator rg = new GaussianRndGenerator(this.config.getMutDev()); conf.setRandomGenerator(rg); conf.setEventManager(new EventManager()); BestChromosomesSelector bestChromsSelector = new BestChromosomesSelector(conf, 0.90d); bestChromsSelector.setDoubletteChromosomesAllowed(true); conf.addNaturalSelector(bestChromsSelector, false); conf.addNaturalSelector(new WeightedRouletteSelector(conf), false); conf.setMinimumPopSizePercent(0); conf.setSelectFromPrevGen(1.0d); conf.setKeepPopulationSizeConstant(true); conf.setFitnessEvaluator(new DefaultFitnessEvaluator()); conf.setChromosomePool(new ChromosomePool()); conf.addGeneticOperator(new CrossoverOperator(conf, this.config.getCrossProb())); conf.addGeneticOperator(new GaussianMutationOperator(conf, this.config.getMutDev())); conf.setPreservFittestIndividual(true); FitnessFunction ff = new SSPFF(this.data, this.config); conf.setFitnessFunction(ff); CARule rule = this.setRule(); //conf.addGeneticOperator(new MutationOperator(conf, rule.getSize()/2)); IChromosome sampleChromosome = rule.initChromosome(conf, this.config.getMaxSteps()); conf.setSampleChromosome(sampleChromosome); conf.setPopulationSize(this.config.getPop()); //genotype = Genotype.randomInitialGenotype(conf); Genotype genotype = this.initGenotype(conf); // population evolving for (int i = 0; i < this.config.getMaxGen(); i++) { logger.info("### " + i + ". 
generation"); genotype.evolve(); // filling GenStats object GenStats gs = new GenStats(); gs.setMax(Utils.getMax(genotype.getPopulation())); logger.info(Utils.getMax(genotype.getPopulation())); gs.setMin(Utils.getMin(genotype.getPopulation())); gs.setMean(Utils.getMean(genotype.getPopulation())); gs.setGeneration(i); this.stats.addGenStats(gs); if (this.stats.isConverged()) break; } return rule.fromChromosome(genotype.getFittestChromosome()); }
public CARule evolve() throws Exception{ // EA configuration Configuration.reset(); Configuration conf = new Configuration("conf"); conf.setBreeder(new GABreeder()); RandomGenerator rg = new GaussianRndGenerator(this.config.getMutDev()); conf.setRandomGenerator(rg); conf.setEventManager(new EventManager()); BestChromosomesSelector bestChromsSelector = new BestChromosomesSelector(conf, 0.90d); bestChromsSelector.setDoubletteChromosomesAllowed(true); conf.addNaturalSelector(bestChromsSelector, false); conf.addNaturalSelector(new WeightedRouletteSelector(conf), false); conf.setMinimumPopSizePercent(0); conf.setSelectFromPrevGen(1.0d); conf.setKeepPopulationSizeConstant(true); conf.setFitnessEvaluator(new DefaultFitnessEvaluator()); conf.setChromosomePool(new ChromosomePool()); conf.addGeneticOperator(new CrossoverOperator(conf, this.config.getCrossProb())); conf.addGeneticOperator(new GaussianMutationOperator(conf, this.config.getMutDev())); conf.setPreservFittestIndividual(true); FitnessFunction ff = new SSPFF(this.data, this.config); conf.setFitnessFunction(ff); CARule rule = this.setRule(); //conf.addGeneticOperator(new MutationOperator(conf, rule.getSize()/2)); IChromosome sampleChromosome = rule.initChromosome(conf, this.config.getMaxSteps()); conf.setSampleChromosome(sampleChromosome); conf.setPopulationSize(this.config.getPop()); //genotype = Genotype.randomInitialGenotype(conf); Genotype genotype = this.initGenotype(conf); // population evolving for (int i = 0; i < this.config.getMaxGen(); i++) { genotype.evolve(); // filling GenStats object GenStats gs = new GenStats(); gs.setMax(Utils.getMax(genotype.getPopulation())); gs.setMin(Utils.getMin(genotype.getPopulation())); gs.setMean(Utils.getMean(genotype.getPopulation())); gs.setGeneration(i); this.stats.addGenStats(gs); logger.info("## " + i + ". 
generation: " + genotype.getFittestChromosome().getFitnessValue()); if (this.stats.isConverged()) break; } CARule bestRule = rule.fromChromosome(genotype.getFittestChromosome()); bestRule.computeMaxPropsDiff(new double[]{this.data.getMaxCF(), this.data.getMaxCC()}); return rule; }
diff --git a/src/com/android/mms/ui/MessageItem.java b/src/com/android/mms/ui/MessageItem.java index f4fe868..85f97c6 100644 --- a/src/com/android/mms/ui/MessageItem.java +++ b/src/com/android/mms/ui/MessageItem.java @@ -1,333 +1,333 @@ /* * Copyright (C) 2008 Esmertec AG. * Copyright (C) 2008 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.mms.ui; import java.util.regex.Pattern; import com.android.mms.R; import com.android.mms.data.Contact; import com.android.mms.model.SlideModel; import com.android.mms.model.SlideshowModel; import com.android.mms.model.TextModel; import com.android.mms.ui.MessageListAdapter.ColumnsMap; import com.android.mms.util.AddressUtils; import com.google.android.mms.MmsException; import com.google.android.mms.pdu.EncodedStringValue; import com.google.android.mms.pdu.MultimediaMessagePdu; import com.google.android.mms.pdu.NotificationInd; import com.google.android.mms.pdu.PduHeaders; import com.google.android.mms.pdu.PduPersister; import com.google.android.mms.pdu.RetrieveConf; import com.google.android.mms.pdu.SendReq; import android.content.ContentUris; import android.content.Context; import android.database.Cursor; import android.net.Uri; import android.provider.Telephony.Mms; import android.provider.Telephony.MmsSms; import android.provider.Telephony.Sms; import android.text.TextUtils; import android.util.Log; /** * Mostly immutable model for an SMS/MMS message. 
* * <p>The only mutable field is the cached formatted message member, * the formatting of which is done outside this model in MessageListItem. */ public class MessageItem { private static String TAG = "MessageItem"; public enum DeliveryStatus { NONE, INFO, FAILED, PENDING, RECEIVED } final Context mContext; final String mType; final long mMsgId; final int mBoxId; DeliveryStatus mDeliveryStatus; boolean mReadReport; boolean mLocked; // locked to prevent auto-deletion String mTimestamp; String mAddress; String mContact; String mBody; // Body of SMS, first text of MMS. String mTextContentType; // ContentType of text of MMS. Pattern mHighlight; // portion of message to highlight (from search) // The only non-immutable field. Not synchronized, as access will // only be from the main GUI thread. Worst case if accessed from // another thread is it'll return null and be set again from that // thread. CharSequence mCachedFormattedMessage; // The last message is cached above in mCachedFormattedMessage. In the latest design, we // show "Sending..." in place of the timestamp when a message is being sent. mLastSendingState // is used to keep track of the last sending state so that if the current sending state is // different, we can clear the message cache so it will get rebuilt and recached. boolean mLastSendingState; // Fields for MMS only. 
Uri mMessageUri; int mMessageType; int mAttachmentType; String mSubject; SlideshowModel mSlideshow; int mMessageSize; int mErrorType; int mErrorCode; boolean mFullTimestamp; boolean mSentTimestamp; MessageItem(Context context, String type, Cursor cursor, ColumnsMap columnsMap, Pattern highlight, boolean fullTimestamp, boolean sentTimestamp) throws MmsException { mContext = context; mMsgId = cursor.getLong(columnsMap.mColumnMsgId); mHighlight = highlight; mType = type; mFullTimestamp = fullTimestamp; mSentTimestamp = sentTimestamp; if ("sms".equals(type)) { mReadReport = false; // No read reports in sms long status = cursor.getLong(columnsMap.mColumnSmsStatus); if (status == Sms.STATUS_NONE) { // No delivery report requested mDeliveryStatus = DeliveryStatus.NONE; } else if (status >= Sms.STATUS_FAILED) { // Failure mDeliveryStatus = DeliveryStatus.FAILED; } else if (status >= Sms.STATUS_PENDING) { // Pending mDeliveryStatus = DeliveryStatus.PENDING; } else { // Success mDeliveryStatus = DeliveryStatus.RECEIVED; } mMessageUri = ContentUris.withAppendedId(Sms.CONTENT_URI, mMsgId); // Set contact and message body mBoxId = cursor.getInt(columnsMap.mColumnSmsType); mAddress = cursor.getString(columnsMap.mColumnSmsAddress); if (Sms.isOutgoingFolder(mBoxId)) { String meString = context.getString( R.string.messagelist_sender_self); mContact = meString; } else { // For incoming messages, the ADDRESS field contains the sender. mContact = Contact.get(mAddress, false).getName(); } mBody = cursor.getString(columnsMap.mColumnSmsBody); // Unless the message is currently in the progress of being sent, it gets a time stamp. 
if (!isOutgoingMessage()) { // Set "received" or "sent" time stamp long date = cursor.getLong(columnsMap.mColumnSmsDate); - if (mSentTimestamp && mType.equals(Sms.MESSAGE_TYPE_INBOX)) { + if (mSentTimestamp && (mBoxId == Sms.MESSAGE_TYPE_INBOX)) { date = cursor.getLong(columnsMap.mColumnSmsDateSent); } mTimestamp = MessageUtils.formatTimeStampString(context, date, mFullTimestamp); } mLocked = cursor.getInt(columnsMap.mColumnSmsLocked) != 0; mErrorCode = cursor.getInt(columnsMap.mColumnSmsErrorCode); } else if ("mms".equals(type)) { mMessageUri = ContentUris.withAppendedId(Mms.CONTENT_URI, mMsgId); mBoxId = cursor.getInt(columnsMap.mColumnMmsMessageBox); mMessageType = cursor.getInt(columnsMap.mColumnMmsMessageType); mErrorType = cursor.getInt(columnsMap.mColumnMmsErrorType); String subject = cursor.getString(columnsMap.mColumnMmsSubject); if (!TextUtils.isEmpty(subject)) { EncodedStringValue v = new EncodedStringValue( cursor.getInt(columnsMap.mColumnMmsSubjectCharset), PduPersister.getBytes(subject)); mSubject = v.getString(); } mLocked = cursor.getInt(columnsMap.mColumnMmsLocked) != 0; long timestamp = 0L; PduPersister p = PduPersister.getPduPersister(mContext); if (PduHeaders.MESSAGE_TYPE_NOTIFICATION_IND == mMessageType) { mDeliveryStatus = DeliveryStatus.NONE; NotificationInd notifInd = (NotificationInd) p.load(mMessageUri); interpretFrom(notifInd.getFrom(), mMessageUri); // Borrow the mBody to hold the URL of the message. 
mBody = new String(notifInd.getContentLocation()); mMessageSize = (int) notifInd.getMessageSize(); timestamp = notifInd.getExpiry() * 1000L; } else { MultimediaMessagePdu msg = (MultimediaMessagePdu) p.load(mMessageUri); mSlideshow = SlideshowModel.createFromPduBody(context, msg.getBody()); mAttachmentType = MessageUtils.getAttachmentType(mSlideshow); if (mMessageType == PduHeaders.MESSAGE_TYPE_RETRIEVE_CONF) { RetrieveConf retrieveConf = (RetrieveConf) msg; interpretFrom(retrieveConf.getFrom(), mMessageUri); timestamp = retrieveConf.getDate() * 1000L; } else { // Use constant string for outgoing messages mContact = mAddress = context.getString(R.string.messagelist_sender_self); timestamp = ((SendReq) msg).getDate() * 1000L; } String report = cursor.getString(columnsMap.mColumnMmsDeliveryReport); if ((report == null) || !mAddress.equals(context.getString( R.string.messagelist_sender_self))) { mDeliveryStatus = DeliveryStatus.NONE; } else { int reportInt; try { reportInt = Integer.parseInt(report); if (reportInt == PduHeaders.VALUE_YES) { mDeliveryStatus = DeliveryStatus.RECEIVED; } else { mDeliveryStatus = DeliveryStatus.NONE; } } catch (NumberFormatException nfe) { Log.e(TAG, "Value for delivery report was invalid."); mDeliveryStatus = DeliveryStatus.NONE; } } report = cursor.getString(columnsMap.mColumnMmsReadReport); if ((report == null) || !mAddress.equals(context.getString( R.string.messagelist_sender_self))) { mReadReport = false; } else { int reportInt; try { reportInt = Integer.parseInt(report); mReadReport = (reportInt == PduHeaders.VALUE_YES); } catch (NumberFormatException nfe) { Log.e(TAG, "Value for read report was invalid."); mReadReport = false; } } SlideModel slide = mSlideshow.get(0); if ((slide != null) && slide.hasText()) { TextModel tm = slide.getText(); if (tm.isDrmProtected()) { mBody = mContext.getString(R.string.drm_protected_text); } else { mBody = tm.getText(); } mTextContentType = tm.getContentType(); } mMessageSize = 
mSlideshow.getTotalMessageSize(); } if (!isOutgoingMessage()) { if (PduHeaders.MESSAGE_TYPE_NOTIFICATION_IND == mMessageType) { mTimestamp = context.getString(R.string.expire_on, MessageUtils.formatTimeStampString(context, timestamp, mFullTimestamp)); } else { mTimestamp = MessageUtils.formatTimeStampString(context, timestamp, mFullTimestamp); } } } else { throw new MmsException("Unknown type of the message: " + type); } } private void interpretFrom(EncodedStringValue from, Uri messageUri) { if (from != null) { mAddress = from.getString(); } else { // In the rare case when getting the "from" address from the pdu fails, // (e.g. from == null) fall back to a slower, yet more reliable method of // getting the address from the "addr" table. This is what the Messaging // notification system uses. mAddress = AddressUtils.getFrom(mContext, messageUri); } mContact = TextUtils.isEmpty(mAddress) ? "" : Contact.get(mAddress, false).getName(); } public boolean isMms() { return mType.equals("mms"); } public boolean isSms() { return mType.equals("sms"); } public boolean isDownloaded() { return (mMessageType != PduHeaders.MESSAGE_TYPE_NOTIFICATION_IND); } public boolean isOutgoingMessage() { boolean isOutgoingMms = isMms() && (mBoxId == Mms.MESSAGE_BOX_OUTBOX); boolean isOutgoingSms = isSms() && ((mBoxId == Sms.MESSAGE_TYPE_FAILED) || (mBoxId == Sms.MESSAGE_TYPE_OUTBOX) || (mBoxId == Sms.MESSAGE_TYPE_QUEUED)); return isOutgoingMms || isOutgoingSms; } public boolean isSending() { return !isFailedMessage() && isOutgoingMessage(); } public boolean isFailedMessage() { boolean isFailedMms = isMms() && (mErrorType >= MmsSms.ERR_TYPE_GENERIC_PERMANENT); boolean isFailedSms = isSms() && (mBoxId == Sms.MESSAGE_TYPE_FAILED); return isFailedMms || isFailedSms; } // Note: This is the only mutable field in this class. 
Think of // mCachedFormattedMessage as a C++ 'mutable' field on a const // object, with this being a lazy accessor whose logic to set it // is outside the class for model/view separation reasons. In any // case, please keep this class conceptually immutable. public void setCachedFormattedMessage(CharSequence formattedMessage) { mCachedFormattedMessage = formattedMessage; } public CharSequence getCachedFormattedMessage() { boolean isSending = isSending(); if (isSending != mLastSendingState) { mLastSendingState = isSending; mCachedFormattedMessage = null; // clear cache so we'll rebuild the message // to show "Sending..." or the sent date. } return mCachedFormattedMessage; } public int getBoxId() { return mBoxId; } @Override public String toString() { return "type: " + mType + " box: " + mBoxId + " uri: " + mMessageUri + " address: " + mAddress + " contact: " + mContact + " read: " + mReadReport + " delivery status: " + mDeliveryStatus; } }
true
true
MessageItem(Context context, String type, Cursor cursor, ColumnsMap columnsMap, Pattern highlight, boolean fullTimestamp, boolean sentTimestamp) throws MmsException { mContext = context; mMsgId = cursor.getLong(columnsMap.mColumnMsgId); mHighlight = highlight; mType = type; mFullTimestamp = fullTimestamp; mSentTimestamp = sentTimestamp; if ("sms".equals(type)) { mReadReport = false; // No read reports in sms long status = cursor.getLong(columnsMap.mColumnSmsStatus); if (status == Sms.STATUS_NONE) { // No delivery report requested mDeliveryStatus = DeliveryStatus.NONE; } else if (status >= Sms.STATUS_FAILED) { // Failure mDeliveryStatus = DeliveryStatus.FAILED; } else if (status >= Sms.STATUS_PENDING) { // Pending mDeliveryStatus = DeliveryStatus.PENDING; } else { // Success mDeliveryStatus = DeliveryStatus.RECEIVED; } mMessageUri = ContentUris.withAppendedId(Sms.CONTENT_URI, mMsgId); // Set contact and message body mBoxId = cursor.getInt(columnsMap.mColumnSmsType); mAddress = cursor.getString(columnsMap.mColumnSmsAddress); if (Sms.isOutgoingFolder(mBoxId)) { String meString = context.getString( R.string.messagelist_sender_self); mContact = meString; } else { // For incoming messages, the ADDRESS field contains the sender. mContact = Contact.get(mAddress, false).getName(); } mBody = cursor.getString(columnsMap.mColumnSmsBody); // Unless the message is currently in the progress of being sent, it gets a time stamp. 
if (!isOutgoingMessage()) { // Set "received" or "sent" time stamp long date = cursor.getLong(columnsMap.mColumnSmsDate); if (mSentTimestamp && mType.equals(Sms.MESSAGE_TYPE_INBOX)) { date = cursor.getLong(columnsMap.mColumnSmsDateSent); } mTimestamp = MessageUtils.formatTimeStampString(context, date, mFullTimestamp); } mLocked = cursor.getInt(columnsMap.mColumnSmsLocked) != 0; mErrorCode = cursor.getInt(columnsMap.mColumnSmsErrorCode); } else if ("mms".equals(type)) { mMessageUri = ContentUris.withAppendedId(Mms.CONTENT_URI, mMsgId); mBoxId = cursor.getInt(columnsMap.mColumnMmsMessageBox); mMessageType = cursor.getInt(columnsMap.mColumnMmsMessageType); mErrorType = cursor.getInt(columnsMap.mColumnMmsErrorType); String subject = cursor.getString(columnsMap.mColumnMmsSubject); if (!TextUtils.isEmpty(subject)) { EncodedStringValue v = new EncodedStringValue( cursor.getInt(columnsMap.mColumnMmsSubjectCharset), PduPersister.getBytes(subject)); mSubject = v.getString(); } mLocked = cursor.getInt(columnsMap.mColumnMmsLocked) != 0; long timestamp = 0L; PduPersister p = PduPersister.getPduPersister(mContext); if (PduHeaders.MESSAGE_TYPE_NOTIFICATION_IND == mMessageType) { mDeliveryStatus = DeliveryStatus.NONE; NotificationInd notifInd = (NotificationInd) p.load(mMessageUri); interpretFrom(notifInd.getFrom(), mMessageUri); // Borrow the mBody to hold the URL of the message. 
mBody = new String(notifInd.getContentLocation()); mMessageSize = (int) notifInd.getMessageSize(); timestamp = notifInd.getExpiry() * 1000L; } else { MultimediaMessagePdu msg = (MultimediaMessagePdu) p.load(mMessageUri); mSlideshow = SlideshowModel.createFromPduBody(context, msg.getBody()); mAttachmentType = MessageUtils.getAttachmentType(mSlideshow); if (mMessageType == PduHeaders.MESSAGE_TYPE_RETRIEVE_CONF) { RetrieveConf retrieveConf = (RetrieveConf) msg; interpretFrom(retrieveConf.getFrom(), mMessageUri); timestamp = retrieveConf.getDate() * 1000L; } else { // Use constant string for outgoing messages mContact = mAddress = context.getString(R.string.messagelist_sender_self); timestamp = ((SendReq) msg).getDate() * 1000L; } String report = cursor.getString(columnsMap.mColumnMmsDeliveryReport); if ((report == null) || !mAddress.equals(context.getString( R.string.messagelist_sender_self))) { mDeliveryStatus = DeliveryStatus.NONE; } else { int reportInt; try { reportInt = Integer.parseInt(report); if (reportInt == PduHeaders.VALUE_YES) { mDeliveryStatus = DeliveryStatus.RECEIVED; } else { mDeliveryStatus = DeliveryStatus.NONE; } } catch (NumberFormatException nfe) { Log.e(TAG, "Value for delivery report was invalid."); mDeliveryStatus = DeliveryStatus.NONE; } } report = cursor.getString(columnsMap.mColumnMmsReadReport); if ((report == null) || !mAddress.equals(context.getString( R.string.messagelist_sender_self))) { mReadReport = false; } else { int reportInt; try { reportInt = Integer.parseInt(report); mReadReport = (reportInt == PduHeaders.VALUE_YES); } catch (NumberFormatException nfe) { Log.e(TAG, "Value for read report was invalid."); mReadReport = false; } } SlideModel slide = mSlideshow.get(0); if ((slide != null) && slide.hasText()) { TextModel tm = slide.getText(); if (tm.isDrmProtected()) { mBody = mContext.getString(R.string.drm_protected_text); } else { mBody = tm.getText(); } mTextContentType = tm.getContentType(); } mMessageSize = 
mSlideshow.getTotalMessageSize(); } if (!isOutgoingMessage()) { if (PduHeaders.MESSAGE_TYPE_NOTIFICATION_IND == mMessageType) { mTimestamp = context.getString(R.string.expire_on, MessageUtils.formatTimeStampString(context, timestamp, mFullTimestamp)); } else { mTimestamp = MessageUtils.formatTimeStampString(context, timestamp, mFullTimestamp); } } } else { throw new MmsException("Unknown type of the message: " + type); } }
MessageItem(Context context, String type, Cursor cursor, ColumnsMap columnsMap, Pattern highlight, boolean fullTimestamp, boolean sentTimestamp) throws MmsException { mContext = context; mMsgId = cursor.getLong(columnsMap.mColumnMsgId); mHighlight = highlight; mType = type; mFullTimestamp = fullTimestamp; mSentTimestamp = sentTimestamp; if ("sms".equals(type)) { mReadReport = false; // No read reports in sms long status = cursor.getLong(columnsMap.mColumnSmsStatus); if (status == Sms.STATUS_NONE) { // No delivery report requested mDeliveryStatus = DeliveryStatus.NONE; } else if (status >= Sms.STATUS_FAILED) { // Failure mDeliveryStatus = DeliveryStatus.FAILED; } else if (status >= Sms.STATUS_PENDING) { // Pending mDeliveryStatus = DeliveryStatus.PENDING; } else { // Success mDeliveryStatus = DeliveryStatus.RECEIVED; } mMessageUri = ContentUris.withAppendedId(Sms.CONTENT_URI, mMsgId); // Set contact and message body mBoxId = cursor.getInt(columnsMap.mColumnSmsType); mAddress = cursor.getString(columnsMap.mColumnSmsAddress); if (Sms.isOutgoingFolder(mBoxId)) { String meString = context.getString( R.string.messagelist_sender_self); mContact = meString; } else { // For incoming messages, the ADDRESS field contains the sender. mContact = Contact.get(mAddress, false).getName(); } mBody = cursor.getString(columnsMap.mColumnSmsBody); // Unless the message is currently in the progress of being sent, it gets a time stamp. 
if (!isOutgoingMessage()) { // Set "received" or "sent" time stamp long date = cursor.getLong(columnsMap.mColumnSmsDate); if (mSentTimestamp && (mBoxId == Sms.MESSAGE_TYPE_INBOX)) { date = cursor.getLong(columnsMap.mColumnSmsDateSent); } mTimestamp = MessageUtils.formatTimeStampString(context, date, mFullTimestamp); } mLocked = cursor.getInt(columnsMap.mColumnSmsLocked) != 0; mErrorCode = cursor.getInt(columnsMap.mColumnSmsErrorCode); } else if ("mms".equals(type)) { mMessageUri = ContentUris.withAppendedId(Mms.CONTENT_URI, mMsgId); mBoxId = cursor.getInt(columnsMap.mColumnMmsMessageBox); mMessageType = cursor.getInt(columnsMap.mColumnMmsMessageType); mErrorType = cursor.getInt(columnsMap.mColumnMmsErrorType); String subject = cursor.getString(columnsMap.mColumnMmsSubject); if (!TextUtils.isEmpty(subject)) { EncodedStringValue v = new EncodedStringValue( cursor.getInt(columnsMap.mColumnMmsSubjectCharset), PduPersister.getBytes(subject)); mSubject = v.getString(); } mLocked = cursor.getInt(columnsMap.mColumnMmsLocked) != 0; long timestamp = 0L; PduPersister p = PduPersister.getPduPersister(mContext); if (PduHeaders.MESSAGE_TYPE_NOTIFICATION_IND == mMessageType) { mDeliveryStatus = DeliveryStatus.NONE; NotificationInd notifInd = (NotificationInd) p.load(mMessageUri); interpretFrom(notifInd.getFrom(), mMessageUri); // Borrow the mBody to hold the URL of the message. 
mBody = new String(notifInd.getContentLocation()); mMessageSize = (int) notifInd.getMessageSize(); timestamp = notifInd.getExpiry() * 1000L; } else { MultimediaMessagePdu msg = (MultimediaMessagePdu) p.load(mMessageUri); mSlideshow = SlideshowModel.createFromPduBody(context, msg.getBody()); mAttachmentType = MessageUtils.getAttachmentType(mSlideshow); if (mMessageType == PduHeaders.MESSAGE_TYPE_RETRIEVE_CONF) { RetrieveConf retrieveConf = (RetrieveConf) msg; interpretFrom(retrieveConf.getFrom(), mMessageUri); timestamp = retrieveConf.getDate() * 1000L; } else { // Use constant string for outgoing messages mContact = mAddress = context.getString(R.string.messagelist_sender_self); timestamp = ((SendReq) msg).getDate() * 1000L; } String report = cursor.getString(columnsMap.mColumnMmsDeliveryReport); if ((report == null) || !mAddress.equals(context.getString( R.string.messagelist_sender_self))) { mDeliveryStatus = DeliveryStatus.NONE; } else { int reportInt; try { reportInt = Integer.parseInt(report); if (reportInt == PduHeaders.VALUE_YES) { mDeliveryStatus = DeliveryStatus.RECEIVED; } else { mDeliveryStatus = DeliveryStatus.NONE; } } catch (NumberFormatException nfe) { Log.e(TAG, "Value for delivery report was invalid."); mDeliveryStatus = DeliveryStatus.NONE; } } report = cursor.getString(columnsMap.mColumnMmsReadReport); if ((report == null) || !mAddress.equals(context.getString( R.string.messagelist_sender_self))) { mReadReport = false; } else { int reportInt; try { reportInt = Integer.parseInt(report); mReadReport = (reportInt == PduHeaders.VALUE_YES); } catch (NumberFormatException nfe) { Log.e(TAG, "Value for read report was invalid."); mReadReport = false; } } SlideModel slide = mSlideshow.get(0); if ((slide != null) && slide.hasText()) { TextModel tm = slide.getText(); if (tm.isDrmProtected()) { mBody = mContext.getString(R.string.drm_protected_text); } else { mBody = tm.getText(); } mTextContentType = tm.getContentType(); } mMessageSize = 
mSlideshow.getTotalMessageSize(); } if (!isOutgoingMessage()) { if (PduHeaders.MESSAGE_TYPE_NOTIFICATION_IND == mMessageType) { mTimestamp = context.getString(R.string.expire_on, MessageUtils.formatTimeStampString(context, timestamp, mFullTimestamp)); } else { mTimestamp = MessageUtils.formatTimeStampString(context, timestamp, mFullTimestamp); } } } else { throw new MmsException("Unknown type of the message: " + type); } }
diff --git a/src/main/java/com/gvls2downloader/gvls2proxy/ProxyServlet.java b/src/main/java/com/gvls2downloader/gvls2proxy/ProxyServlet.java index 3cb427e..caf158c 100644 --- a/src/main/java/com/gvls2downloader/gvls2proxy/ProxyServlet.java +++ b/src/main/java/com/gvls2downloader/gvls2proxy/ProxyServlet.java @@ -1,225 +1,230 @@ package com.gvls2downloader.gvls2proxy; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import javax.servlet.ServletException; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.http.Header; import org.apache.http.HttpResponse; import org.apache.http.auth.AuthScope; import org.apache.http.auth.UsernamePasswordCredentials; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.conn.ClientConnectionManager; import org.apache.http.conn.scheme.PlainSocketFactory; import org.apache.http.conn.scheme.Scheme; import org.apache.http.conn.scheme.SchemeRegistry; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.impl.conn.PoolingClientConnectionManager; /** * */ @WebServlet(name = "ProxyServlet", urlPatterns = {"/ProxyServlet"}) public class ProxyServlet extends HttpServlet { private boolean requestIsRunning = false; private static final String KEY_IP = "ip"; private static final String KEY_USER = "user"; private static final String KEY_PASSWORD = "password"; protected void processRequest(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { if (requestIsRunning) { resp.setContentType("text/plain"); resp.sendError(503); resp.getWriter().write("Request already in progress!"); + return; } requestIsRunning = true; try { String ipParam = req.getParameter(KEY_IP); 
if (ipParam == null || ipParam.trim().equals("")) { ipParam = "192.168.8.156"; } final String ip = ipParam; String user = req.getParameter(KEY_USER); String pw = req.getParameter(KEY_PASSWORD); if (user == null || user.trim().equals("")) { user = "camuser"; } if (pw == null || pw.trim().equals("")) { pw = "password"; } resp.setContentType("video/mpeg"); SchemeRegistry schemeRegistry = new SchemeRegistry(); schemeRegistry.register(new Scheme("http", 80, PlainSocketFactory.getSocketFactory())); ClientConnectionManager cm = new PoolingClientConnectionManager(schemeRegistry); final DefaultHttpClient httpClient = new DefaultHttpClient(cm); UsernamePasswordCredentials creds = new UsernamePasswordCredentials(user, pw); httpClient.getCredentialsProvider().setCredentials(AuthScope.ANY, creds); // 1. Authenticate HttpGet httpGet = new HttpGet("http://" + ip + "/php/session_start_user.php"); //CredentialsProvider credsProvider = new BasicCredentialsProvider(); //credsProvider.setCredentials(AuthScope.ANY, creds); HttpResponse response = httpClient.execute(httpGet); System.out.println("Headers from auth response:"); printHeaders(response); InputStream is = response.getEntity().getContent(); writeStreamToFile(is, "authentication.txt"); // 2. Send Hello httpGet = new HttpGet("http://" + ip + "/cgi-bin/hello.cgi"); response = httpClient.execute(httpGet); System.out.println("Headers from hello response:"); printHeaders(response); is = response.getEntity().getContent(); writeStreamToFile(is, "hello.txt"); // 3. 
Send first command HttpPost httpPost = new HttpPost("http://" + ip + "/cgi-bin/cmd.cgi"); StringEntity stringEntity = new StringEntity("\"Command\":\"SetCamCtrl\",\"Params\":{\"Ctrl\":\"ModeMonitor\"}}"); httpPost.setEntity(null); response = httpClient.execute(httpPost); System.out.println("Headers from cmd response:"); printHeaders(response); is = response.getEntity().getContent(); writeStreamToFile(is, "cmd.txt"); Thread newThread = new Thread() { @Override public void run() { while (requestIsRunning) { try { HttpGet httpGet = new HttpGet("http://" + ip + "/php/session_continue.php"); HttpResponse response = httpClient.execute(httpGet); System.out.println("Headers from session continuation:"); printHeaders(response); InputStream is = response.getEntity().getContent(); writeStreamToFile(is, "session_continue.txt"); } catch (Throwable t) { t.printStackTrace(); } try { Thread.sleep(10000); } catch (Throwable t) { t.printStackTrace(); } } } }; newThread.start(); // 4. Grab MPEG-TS // /cgi-bin/movie_sp.cgi - httpGet = new HttpGet("http://" + ip + "/cgi-bin/movie_sp.cgi"); - response = httpClient.execute(httpGet); - System.out.println("Headers from video stream response:"); - printHeaders(response); - is = response.getEntity().getContent(); - writeStreamToOutput(is, resp.getOutputStream()); - System.out.println("MPEG-TS stream complete!"); + try { + httpGet = new HttpGet("http://" + ip + "/cgi-bin/movie_sp.cgi"); + response = httpClient.execute(httpGet); + System.out.println("Headers from video stream response:"); + printHeaders(response); + is = response.getEntity().getContent(); + writeStreamToOutput(is, resp.getOutputStream()); + System.out.println("MPEG-TS stream complete!"); + } catch (Exception e) { + System.out.println("Error transferring stream... shutting down thread!"); + } // 5. 
Disconnect session httpGet = new HttpGet("http://" + ip + "/php/session_finish.php"); response = httpClient.execute(httpGet); System.out.println("Headers from Session completion:"); printHeaders(response); is = response.getEntity().getContent(); writeStreamToFile(is, "session_complete.txt"); } finally { requestIsRunning = false; } } private static void printHeaders(HttpResponse response) { for (Header header : response.getAllHeaders()) { System.out.println("Header: " + header.getName() + ": " + header.getValue()); } System.out.println(); System.out.println(); } private static void writeStreamToFile(InputStream is, String filename) throws IOException { File file = new File(filename); System.out.println("Writing to: " + file.getAbsolutePath()); FileOutputStream fos = new FileOutputStream(file); byte[] buffer = new byte[32768]; int bytesRead; while ((bytesRead = is.read(buffer)) != -1) { fos.write(buffer, 0, bytesRead); } fos.close(); is.close(); } private void writeStreamToOutput(InputStream is, OutputStream os) throws IOException { byte[] buffer = new byte[32768]; int bytesRead; while ((bytesRead = is.read(buffer)) != -1) { os.write(buffer, 0, bytesRead); os.flush(); if (!requestIsRunning) { break; } } os.close(); is.close(); } // <editor-fold defaultstate="collapsed" desc="HttpServlet methods. Click on the + sign on the left to edit the code."> /** * Handles the HTTP * <code>GET</code> method. * * @param request servlet request * @param response servlet response * @throws ServletException if a servlet-specific error occurs * @throws IOException if an I/O error occurs */ @Override protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { processRequest(request, response); } /** * Handles the HTTP * <code>POST</code> method. 
* * @param request servlet request * @param response servlet response * @throws ServletException if a servlet-specific error occurs * @throws IOException if an I/O error occurs */ @Override protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { processRequest(request, response); } /** * Returns a short description of the servlet. * * @return a String containing servlet description */ @Override public String getServletInfo() { return "Short description"; }// </editor-fold> }
false
true
protected void processRequest(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { if (requestIsRunning) { resp.setContentType("text/plain"); resp.sendError(503); resp.getWriter().write("Request already in progress!"); } requestIsRunning = true; try { String ipParam = req.getParameter(KEY_IP); if (ipParam == null || ipParam.trim().equals("")) { ipParam = "192.168.8.156"; } final String ip = ipParam; String user = req.getParameter(KEY_USER); String pw = req.getParameter(KEY_PASSWORD); if (user == null || user.trim().equals("")) { user = "camuser"; } if (pw == null || pw.trim().equals("")) { pw = "password"; } resp.setContentType("video/mpeg"); SchemeRegistry schemeRegistry = new SchemeRegistry(); schemeRegistry.register(new Scheme("http", 80, PlainSocketFactory.getSocketFactory())); ClientConnectionManager cm = new PoolingClientConnectionManager(schemeRegistry); final DefaultHttpClient httpClient = new DefaultHttpClient(cm); UsernamePasswordCredentials creds = new UsernamePasswordCredentials(user, pw); httpClient.getCredentialsProvider().setCredentials(AuthScope.ANY, creds); // 1. Authenticate HttpGet httpGet = new HttpGet("http://" + ip + "/php/session_start_user.php"); //CredentialsProvider credsProvider = new BasicCredentialsProvider(); //credsProvider.setCredentials(AuthScope.ANY, creds); HttpResponse response = httpClient.execute(httpGet); System.out.println("Headers from auth response:"); printHeaders(response); InputStream is = response.getEntity().getContent(); writeStreamToFile(is, "authentication.txt"); // 2. Send Hello httpGet = new HttpGet("http://" + ip + "/cgi-bin/hello.cgi"); response = httpClient.execute(httpGet); System.out.println("Headers from hello response:"); printHeaders(response); is = response.getEntity().getContent(); writeStreamToFile(is, "hello.txt"); // 3. 
Send first command HttpPost httpPost = new HttpPost("http://" + ip + "/cgi-bin/cmd.cgi"); StringEntity stringEntity = new StringEntity("\"Command\":\"SetCamCtrl\",\"Params\":{\"Ctrl\":\"ModeMonitor\"}}"); httpPost.setEntity(null); response = httpClient.execute(httpPost); System.out.println("Headers from cmd response:"); printHeaders(response); is = response.getEntity().getContent(); writeStreamToFile(is, "cmd.txt"); Thread newThread = new Thread() { @Override public void run() { while (requestIsRunning) { try { HttpGet httpGet = new HttpGet("http://" + ip + "/php/session_continue.php"); HttpResponse response = httpClient.execute(httpGet); System.out.println("Headers from session continuation:"); printHeaders(response); InputStream is = response.getEntity().getContent(); writeStreamToFile(is, "session_continue.txt"); } catch (Throwable t) { t.printStackTrace(); } try { Thread.sleep(10000); } catch (Throwable t) { t.printStackTrace(); } } } }; newThread.start(); // 4. Grab MPEG-TS // /cgi-bin/movie_sp.cgi httpGet = new HttpGet("http://" + ip + "/cgi-bin/movie_sp.cgi"); response = httpClient.execute(httpGet); System.out.println("Headers from video stream response:"); printHeaders(response); is = response.getEntity().getContent(); writeStreamToOutput(is, resp.getOutputStream()); System.out.println("MPEG-TS stream complete!"); // 5. Disconnect session httpGet = new HttpGet("http://" + ip + "/php/session_finish.php"); response = httpClient.execute(httpGet); System.out.println("Headers from Session completion:"); printHeaders(response); is = response.getEntity().getContent(); writeStreamToFile(is, "session_complete.txt"); } finally { requestIsRunning = false; } }
protected void processRequest(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { if (requestIsRunning) { resp.setContentType("text/plain"); resp.sendError(503); resp.getWriter().write("Request already in progress!"); return; } requestIsRunning = true; try { String ipParam = req.getParameter(KEY_IP); if (ipParam == null || ipParam.trim().equals("")) { ipParam = "192.168.8.156"; } final String ip = ipParam; String user = req.getParameter(KEY_USER); String pw = req.getParameter(KEY_PASSWORD); if (user == null || user.trim().equals("")) { user = "camuser"; } if (pw == null || pw.trim().equals("")) { pw = "password"; } resp.setContentType("video/mpeg"); SchemeRegistry schemeRegistry = new SchemeRegistry(); schemeRegistry.register(new Scheme("http", 80, PlainSocketFactory.getSocketFactory())); ClientConnectionManager cm = new PoolingClientConnectionManager(schemeRegistry); final DefaultHttpClient httpClient = new DefaultHttpClient(cm); UsernamePasswordCredentials creds = new UsernamePasswordCredentials(user, pw); httpClient.getCredentialsProvider().setCredentials(AuthScope.ANY, creds); // 1. Authenticate HttpGet httpGet = new HttpGet("http://" + ip + "/php/session_start_user.php"); //CredentialsProvider credsProvider = new BasicCredentialsProvider(); //credsProvider.setCredentials(AuthScope.ANY, creds); HttpResponse response = httpClient.execute(httpGet); System.out.println("Headers from auth response:"); printHeaders(response); InputStream is = response.getEntity().getContent(); writeStreamToFile(is, "authentication.txt"); // 2. Send Hello httpGet = new HttpGet("http://" + ip + "/cgi-bin/hello.cgi"); response = httpClient.execute(httpGet); System.out.println("Headers from hello response:"); printHeaders(response); is = response.getEntity().getContent(); writeStreamToFile(is, "hello.txt"); // 3. 
Send first command HttpPost httpPost = new HttpPost("http://" + ip + "/cgi-bin/cmd.cgi"); StringEntity stringEntity = new StringEntity("\"Command\":\"SetCamCtrl\",\"Params\":{\"Ctrl\":\"ModeMonitor\"}}"); httpPost.setEntity(null); response = httpClient.execute(httpPost); System.out.println("Headers from cmd response:"); printHeaders(response); is = response.getEntity().getContent(); writeStreamToFile(is, "cmd.txt"); Thread newThread = new Thread() { @Override public void run() { while (requestIsRunning) { try { HttpGet httpGet = new HttpGet("http://" + ip + "/php/session_continue.php"); HttpResponse response = httpClient.execute(httpGet); System.out.println("Headers from session continuation:"); printHeaders(response); InputStream is = response.getEntity().getContent(); writeStreamToFile(is, "session_continue.txt"); } catch (Throwable t) { t.printStackTrace(); } try { Thread.sleep(10000); } catch (Throwable t) { t.printStackTrace(); } } } }; newThread.start(); // 4. Grab MPEG-TS // /cgi-bin/movie_sp.cgi try { httpGet = new HttpGet("http://" + ip + "/cgi-bin/movie_sp.cgi"); response = httpClient.execute(httpGet); System.out.println("Headers from video stream response:"); printHeaders(response); is = response.getEntity().getContent(); writeStreamToOutput(is, resp.getOutputStream()); System.out.println("MPEG-TS stream complete!"); } catch (Exception e) { System.out.println("Error transferring stream... shutting down thread!"); } // 5. Disconnect session httpGet = new HttpGet("http://" + ip + "/php/session_finish.php"); response = httpClient.execute(httpGet); System.out.println("Headers from Session completion:"); printHeaders(response); is = response.getEntity().getContent(); writeStreamToFile(is, "session_complete.txt"); } finally { requestIsRunning = false; } }
diff --git a/src/main/java/de/minestar/FifthElement/commands/home/cmdHome.java b/src/main/java/de/minestar/FifthElement/commands/home/cmdHome.java index f201b7a..cb64473 100644 --- a/src/main/java/de/minestar/FifthElement/commands/home/cmdHome.java +++ b/src/main/java/de/minestar/FifthElement/commands/home/cmdHome.java @@ -1,88 +1,90 @@ /* * Copyright (C) 2012 MineStar.de * * This file is part of FifthElement. * * FifthElement is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, version 3 of the License. * * FifthElement is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with FifthElement. If not, see <http://www.gnu.org/licenses/>. */ package de.minestar.FifthElement.commands.home; import org.bukkit.entity.Player; import de.minestar.FifthElement.core.Core; import de.minestar.FifthElement.data.Home; import de.minestar.FifthElement.statistics.home.HomeStat; import de.minestar.minestarlibrary.stats.StatisticHandler; import de.minestar.minestarlibrary.commands.AbstractExtendedCommand; import de.minestar.minestarlibrary.utils.PlayerUtils; public class cmdHome extends AbstractExtendedCommand { private static final String OTHER_HOME_PERMISSION = "fifthelement.command.otherhome"; public cmdHome(String syntax, String arguments, String node) { super(Core.NAME, syntax, arguments, node); } @Override public void execute(String[] args, Player player) { Home home = null; // OWN HOME if (args.length == 0) { home = Core.homeManager.getHome(player.getName()); if (home == null) { PlayerUtils.sendError(player, pluginName, "Du hast kein Zuhause erstellt!"); PlayerUtils.sendInfo(player, "Mit '/setHome' erstellst du dir ein 
Zuhause."); return; } // STORE EVENTUALLY LAST POSITION Core.backManager.handleTeleport(player); player.teleport(home.getLocation()); PlayerUtils.sendSuccess(player, pluginName, "Willkommen zu Hause."); } // HOME OF OTHER PLAYER else if (args.length == 1) { // CAN PLAYER USE OTHER HOMES if (checkSpecialPermission(player, OTHER_HOME_PERMISSION)) { // FIND THE CORRECT PLAYER NAME String targetName = PlayerUtils.getCorrectPlayerName(args[0]); if (targetName == null) { PlayerUtils.sendError(player, targetName, "Kann den Spieler '" + args[0] + "' nicht finden!"); return; } home = Core.homeManager.getHome(targetName); if (home == null) { PlayerUtils.sendError(player, pluginName, "Der Spieler '" + targetName + "' hat kein Zuhause erstellt!"); return; } // STORE EVENTUALLY LAST POSITION Core.backManager.handleTeleport(player); player.teleport(home.getLocation()); PlayerUtils.sendSuccess(player, pluginName, "Haus von '" + home.getOwner() + "'."); } } // WRONG COMMAND SYNTAX else { PlayerUtils.sendError(player, pluginName, getHelpMessage()); return; } - // FIRE STATISTIC - StatisticHandler.handleStatistic(new HomeStat(player.getName(), home.getOwner())); + if (home != null) { + // FIRE STATISTIC + StatisticHandler.handleStatistic(new HomeStat(player.getName(), home.getOwner())); + } } }
true
true
public void execute(String[] args, Player player) { Home home = null; // OWN HOME if (args.length == 0) { home = Core.homeManager.getHome(player.getName()); if (home == null) { PlayerUtils.sendError(player, pluginName, "Du hast kein Zuhause erstellt!"); PlayerUtils.sendInfo(player, "Mit '/setHome' erstellst du dir ein Zuhause."); return; } // STORE EVENTUALLY LAST POSITION Core.backManager.handleTeleport(player); player.teleport(home.getLocation()); PlayerUtils.sendSuccess(player, pluginName, "Willkommen zu Hause."); } // HOME OF OTHER PLAYER else if (args.length == 1) { // CAN PLAYER USE OTHER HOMES if (checkSpecialPermission(player, OTHER_HOME_PERMISSION)) { // FIND THE CORRECT PLAYER NAME String targetName = PlayerUtils.getCorrectPlayerName(args[0]); if (targetName == null) { PlayerUtils.sendError(player, targetName, "Kann den Spieler '" + args[0] + "' nicht finden!"); return; } home = Core.homeManager.getHome(targetName); if (home == null) { PlayerUtils.sendError(player, pluginName, "Der Spieler '" + targetName + "' hat kein Zuhause erstellt!"); return; } // STORE EVENTUALLY LAST POSITION Core.backManager.handleTeleport(player); player.teleport(home.getLocation()); PlayerUtils.sendSuccess(player, pluginName, "Haus von '" + home.getOwner() + "'."); } } // WRONG COMMAND SYNTAX else { PlayerUtils.sendError(player, pluginName, getHelpMessage()); return; } // FIRE STATISTIC StatisticHandler.handleStatistic(new HomeStat(player.getName(), home.getOwner())); }
public void execute(String[] args, Player player) { Home home = null; // OWN HOME if (args.length == 0) { home = Core.homeManager.getHome(player.getName()); if (home == null) { PlayerUtils.sendError(player, pluginName, "Du hast kein Zuhause erstellt!"); PlayerUtils.sendInfo(player, "Mit '/setHome' erstellst du dir ein Zuhause."); return; } // STORE EVENTUALLY LAST POSITION Core.backManager.handleTeleport(player); player.teleport(home.getLocation()); PlayerUtils.sendSuccess(player, pluginName, "Willkommen zu Hause."); } // HOME OF OTHER PLAYER else if (args.length == 1) { // CAN PLAYER USE OTHER HOMES if (checkSpecialPermission(player, OTHER_HOME_PERMISSION)) { // FIND THE CORRECT PLAYER NAME String targetName = PlayerUtils.getCorrectPlayerName(args[0]); if (targetName == null) { PlayerUtils.sendError(player, targetName, "Kann den Spieler '" + args[0] + "' nicht finden!"); return; } home = Core.homeManager.getHome(targetName); if (home == null) { PlayerUtils.sendError(player, pluginName, "Der Spieler '" + targetName + "' hat kein Zuhause erstellt!"); return; } // STORE EVENTUALLY LAST POSITION Core.backManager.handleTeleport(player); player.teleport(home.getLocation()); PlayerUtils.sendSuccess(player, pluginName, "Haus von '" + home.getOwner() + "'."); } } // WRONG COMMAND SYNTAX else { PlayerUtils.sendError(player, pluginName, getHelpMessage()); return; } if (home != null) { // FIRE STATISTIC StatisticHandler.handleStatistic(new HomeStat(player.getName(), home.getOwner())); } }
diff --git a/src/com/maveric/WorkoutTrackerSaveActivity.java b/src/com/maveric/WorkoutTrackerSaveActivity.java index cb776c5..faa780a 100755 --- a/src/com/maveric/WorkoutTrackerSaveActivity.java +++ b/src/com/maveric/WorkoutTrackerSaveActivity.java @@ -1,196 +1,196 @@ package com.maveric; import java.text.SimpleDateFormat; import java.util.Calendar; import android.app.ProgressDialog; import android.content.ContentValues; import android.content.Context; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.text.Editable; import android.text.TextUtils; import android.text.TextWatcher; import android.util.Log; import android.view.View; import android.view.View.OnClickListener; import android.widget.Button; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.CompoundButton.OnCheckedChangeListener; import android.widget.EditText; import android.widget.TextView; import com.maveric.contentprovider.ExceriseProvider; import com.maveric.contentprovider.WorkoutProvider; import com.maveric.database.model.ExceriseValue; import com.maveric.database.model.WorkOutTrackerTable; public class WorkoutTrackerSaveActivity extends MavericBaseActiity { Context ctx; TextView countTypeText; TextView countTypeTime; TextView exceriseTypeText; // TextView woroutCalories; EditText inputData; Button saveData; String countData; CheckBox favoriteDataSave; String exceriseType; Boolean isCheckbox = false; @Override protected void setContentToLayout() { setContentView(R.layout.workoutdatasavecontainer); } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); ctx = getApplicationContext(); Bundle extras = getIntent().getExtras(); exceriseType = extras.getString("type"); exceriseTypeText = (TextView) findViewById(R.id.excerisetypetext); exceriseTypeText.setText(exceriseType); countTypeText = (TextView) findViewById(R.id.favouritebefretext); countTypeTime = (TextView) 
findViewById(R.id.favouriteaftertext); inputData = (EditText) findViewById(R.id.favouritecount); favoriteDataSave = (CheckBox) findViewById(R.id.favoritcheckbox); saveData = (Button) findViewById(R.id.saveexcerisedata); inputData.addTextChangedListener(new TextWatcher() { @Override public void onTextChanged(CharSequence s, int start, int before, int count) { try { countData = inputData.getText().toString(); if (Integer.parseInt(countData) > 300) { toast("please Enter correct value,Are you did excerise more than five hour? dont cheat"); return; } } catch (NumberFormatException e) { toast(" Please enter numbers only - alphabets are not accepted"); } } @Override public void beforeTextChanged(CharSequence s, int start, int count, int after) { } @Override public void afterTextChanged(Editable s) { } }); favoriteDataSave .setOnCheckedChangeListener(new OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { isCheckbox = isChecked; } }); saveData.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { if (isAllfilled()) { final ProgressDialog progressDialog = ProgressDialog.show( WorkoutTrackerSaveActivity.this, "Saving...", "Wait a few sec your data is saving"); new Thread() { public void run() { try { sleep(1000); Calendar c = Calendar.getInstance(); SimpleDateFormat format = new SimpleDateFormat( - "dd-MM-yyyy"); + "dd-MMMM-yyyy"); String cureentDate = format.format(c.getTime()); Log.i("kumar" + this.getClass(), "date" + cureentDate); ContentValues values = new ContentValues(); values.put(WorkOutTrackerTable.Column.DATE, cureentDate); values.put( WorkOutTrackerTable.Column.SELECT_EXCERISE, exceriseType); values.put(WorkOutTrackerTable.Column.WORKOUT, countData); getContentResolver() .insert(WorkoutProvider.INSERT_WORKOUT_DETAILS_URI, values); // save favourite if (isCheckbox) { ContentValues favoriteValues = new ContentValues(); favoriteValues 
.put(ExceriseValue.Column.FAVOURITE_STATUS, "1"); getContentResolver().update( ExceriseProvider.ADD_FAVOURITE_URI, favoriteValues, exceriseType, null); } handler.sendEmptyMessage(0); } catch (Exception e) { if (progressDialog != null) { progressDialog.dismiss(); } Log.e("kumar:" + this.getClass(), "error in sve data into workout table" + e.getMessage(), e); WorkoutTrackerSaveActivity.this.finish(); } progressDialog.dismiss(); } }.start(); } else toast(getString(R.string.REQUIRE_FIELD_TOAST)); } }); } private Boolean isAllfilled() { return !(TextUtils.isEmpty(countData)); } Handler handler = new Handler() { @Override public void handleMessage(Message msg) { switch (msg.what) { case 0: WorkoutTrackerSaveActivity.this.finish(); break; case 1: toast("favorite saved successfully"); } } }; }
true
true
public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); ctx = getApplicationContext(); Bundle extras = getIntent().getExtras(); exceriseType = extras.getString("type"); exceriseTypeText = (TextView) findViewById(R.id.excerisetypetext); exceriseTypeText.setText(exceriseType); countTypeText = (TextView) findViewById(R.id.favouritebefretext); countTypeTime = (TextView) findViewById(R.id.favouriteaftertext); inputData = (EditText) findViewById(R.id.favouritecount); favoriteDataSave = (CheckBox) findViewById(R.id.favoritcheckbox); saveData = (Button) findViewById(R.id.saveexcerisedata); inputData.addTextChangedListener(new TextWatcher() { @Override public void onTextChanged(CharSequence s, int start, int before, int count) { try { countData = inputData.getText().toString(); if (Integer.parseInt(countData) > 300) { toast("please Enter correct value,Are you did excerise more than five hour? dont cheat"); return; } } catch (NumberFormatException e) { toast(" Please enter numbers only - alphabets are not accepted"); } } @Override public void beforeTextChanged(CharSequence s, int start, int count, int after) { } @Override public void afterTextChanged(Editable s) { } }); favoriteDataSave .setOnCheckedChangeListener(new OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { isCheckbox = isChecked; } }); saveData.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { if (isAllfilled()) { final ProgressDialog progressDialog = ProgressDialog.show( WorkoutTrackerSaveActivity.this, "Saving...", "Wait a few sec your data is saving"); new Thread() { public void run() { try { sleep(1000); Calendar c = Calendar.getInstance(); SimpleDateFormat format = new SimpleDateFormat( "dd-MM-yyyy"); String cureentDate = format.format(c.getTime()); Log.i("kumar" + this.getClass(), "date" + cureentDate); ContentValues values = new ContentValues(); 
values.put(WorkOutTrackerTable.Column.DATE, cureentDate); values.put( WorkOutTrackerTable.Column.SELECT_EXCERISE, exceriseType); values.put(WorkOutTrackerTable.Column.WORKOUT, countData); getContentResolver() .insert(WorkoutProvider.INSERT_WORKOUT_DETAILS_URI, values); // save favourite if (isCheckbox) { ContentValues favoriteValues = new ContentValues(); favoriteValues .put(ExceriseValue.Column.FAVOURITE_STATUS, "1"); getContentResolver().update( ExceriseProvider.ADD_FAVOURITE_URI, favoriteValues, exceriseType, null); } handler.sendEmptyMessage(0); } catch (Exception e) { if (progressDialog != null) { progressDialog.dismiss(); } Log.e("kumar:" + this.getClass(), "error in sve data into workout table" + e.getMessage(), e); WorkoutTrackerSaveActivity.this.finish(); } progressDialog.dismiss(); } }.start(); } else toast(getString(R.string.REQUIRE_FIELD_TOAST)); } }); }
public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); ctx = getApplicationContext(); Bundle extras = getIntent().getExtras(); exceriseType = extras.getString("type"); exceriseTypeText = (TextView) findViewById(R.id.excerisetypetext); exceriseTypeText.setText(exceriseType); countTypeText = (TextView) findViewById(R.id.favouritebefretext); countTypeTime = (TextView) findViewById(R.id.favouriteaftertext); inputData = (EditText) findViewById(R.id.favouritecount); favoriteDataSave = (CheckBox) findViewById(R.id.favoritcheckbox); saveData = (Button) findViewById(R.id.saveexcerisedata); inputData.addTextChangedListener(new TextWatcher() { @Override public void onTextChanged(CharSequence s, int start, int before, int count) { try { countData = inputData.getText().toString(); if (Integer.parseInt(countData) > 300) { toast("please Enter correct value,Are you did excerise more than five hour? dont cheat"); return; } } catch (NumberFormatException e) { toast(" Please enter numbers only - alphabets are not accepted"); } } @Override public void beforeTextChanged(CharSequence s, int start, int count, int after) { } @Override public void afterTextChanged(Editable s) { } }); favoriteDataSave .setOnCheckedChangeListener(new OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { isCheckbox = isChecked; } }); saveData.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { if (isAllfilled()) { final ProgressDialog progressDialog = ProgressDialog.show( WorkoutTrackerSaveActivity.this, "Saving...", "Wait a few sec your data is saving"); new Thread() { public void run() { try { sleep(1000); Calendar c = Calendar.getInstance(); SimpleDateFormat format = new SimpleDateFormat( "dd-MMMM-yyyy"); String cureentDate = format.format(c.getTime()); Log.i("kumar" + this.getClass(), "date" + cureentDate); ContentValues values = new ContentValues(); 
values.put(WorkOutTrackerTable.Column.DATE, cureentDate); values.put( WorkOutTrackerTable.Column.SELECT_EXCERISE, exceriseType); values.put(WorkOutTrackerTable.Column.WORKOUT, countData); getContentResolver() .insert(WorkoutProvider.INSERT_WORKOUT_DETAILS_URI, values); // save favourite if (isCheckbox) { ContentValues favoriteValues = new ContentValues(); favoriteValues .put(ExceriseValue.Column.FAVOURITE_STATUS, "1"); getContentResolver().update( ExceriseProvider.ADD_FAVOURITE_URI, favoriteValues, exceriseType, null); } handler.sendEmptyMessage(0); } catch (Exception e) { if (progressDialog != null) { progressDialog.dismiss(); } Log.e("kumar:" + this.getClass(), "error in sve data into workout table" + e.getMessage(), e); WorkoutTrackerSaveActivity.this.finish(); } progressDialog.dismiss(); } }.start(); } else toast(getString(R.string.REQUIRE_FIELD_TOAST)); } }); }
diff --git a/plugins/org.eclipse.birt.report.engine/src/org/eclipse/birt/report/engine/layout/pdf/emitter/TableLayout.java b/plugins/org.eclipse.birt.report.engine/src/org/eclipse/birt/report/engine/layout/pdf/emitter/TableLayout.java index 3f9e476da..6f3288291 100644 --- a/plugins/org.eclipse.birt.report.engine/src/org/eclipse/birt/report/engine/layout/pdf/emitter/TableLayout.java +++ b/plugins/org.eclipse.birt.report.engine/src/org/eclipse/birt/report/engine/layout/pdf/emitter/TableLayout.java @@ -1,907 +1,910 @@ /*********************************************************************** * Copyright (c) 2008 Actuate Corporation. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Actuate Corporation - initial API and implementation ***********************************************************************/ package org.eclipse.birt.report.engine.layout.pdf.emitter; import java.util.ArrayList; import java.util.Iterator; import org.eclipse.birt.report.engine.content.IBandContent; import org.eclipse.birt.report.engine.content.ICellContent; import org.eclipse.birt.report.engine.content.IColumn; import org.eclipse.birt.report.engine.content.IContent; import org.eclipse.birt.report.engine.content.ILabelContent; import org.eclipse.birt.report.engine.content.IReportContent; import org.eclipse.birt.report.engine.content.IRowContent; import org.eclipse.birt.report.engine.content.IStyle; import org.eclipse.birt.report.engine.content.ITableBandContent; import org.eclipse.birt.report.engine.content.ITableContent; import org.eclipse.birt.report.engine.css.engine.StyleConstants; import org.eclipse.birt.report.engine.ir.DimensionType; import org.eclipse.birt.report.engine.ir.EngineIRConstants; import org.eclipse.birt.report.engine.layout.area.impl.AbstractArea; import 
org.eclipse.birt.report.engine.layout.area.impl.AreaFactory; import org.eclipse.birt.report.engine.layout.area.impl.CellArea; import org.eclipse.birt.report.engine.layout.area.impl.ContainerArea; import org.eclipse.birt.report.engine.layout.area.impl.RowArea; import org.eclipse.birt.report.engine.layout.area.impl.TableArea; import org.eclipse.birt.report.engine.layout.pdf.util.PropertyUtil; public class TableLayout extends RepeatableLayout { /** * table content */ private ITableContent tableContent; /** * number of table column */ protected int columnNumber; /** * the first visible column id of the table. */ protected int startCol = -1; /** * the last visible column id of the table. */ protected int endCol = -1; /** * table width */ protected int tableWidth; protected TableLayoutInfo layoutInfo = null; protected TableContext tableContext = null; protected ColumnWidthResolver columnWidthResolver; protected TableAreaLayout regionLayout = null; public TableLayout( LayoutEngineContext context, ContainerLayout parent, IContent content ) { super( context, parent, content ); tableContent = (ITableContent) content; columnWidthResolver = new ColumnWidthResolver( tableContent ); columnNumber = tableContent.getColumnCount( ); boolean isBlock = !PropertyUtil.isInlineElement(content); isInBlockStacking &= isBlock; } protected void createRoot( ) { currentContext.root = AreaFactory.createTableArea( (ITableContent) content ); currentContext.root.setWidth( tableWidth ); } public TableLayoutInfo getLayoutInfo( ) { return layoutInfo; } protected void buildTableLayoutInfo( ) { this.layoutInfo = resolveTableFixedLayout((TableArea)currentContext.root ); } public int getColumnCount() { if(tableContent!=null) { return tableContent.getColumnCount(); } return 0; } protected void checkInlineBlock() { if(PropertyUtil.isInlineElement(tableContent)) { if(parent instanceof IInlineStackingLayout) { int avaWidth = parent.getCurrentMaxContentWidth( ); calculateSpecifiedWidth( ); 
if(avaWidth<specifiedWidth && specifiedWidth>0 && specifiedWidth<parent.getMaxAvaWidth()) { ((IInlineStackingLayout)parent).endLine(); } } } } protected void initialize( ) { checkInlineBlock(); currentContext = new TableContext( ); contextList.add( currentContext ); tableContext = (TableContext)currentContext; createRoot( ); buildTableLayoutInfo( ); currentContext.root.setWidth( layoutInfo.getTableWidth( ) ); currentContext.maxAvaWidth = layoutInfo.getTableWidth( ); if ( parent != null ) { currentContext.root.setAllocatedHeight( parent.getCurrentMaxContentHeight( ) ); } else { currentContext.root.setAllocatedHeight( context.getMaxHeight( ) ); } if ( tableContext.layout == null ) { int start = 0; int end = tableContent.getColumnCount( ) -1; tableContext.layout = new TableAreaLayout( tableContent, layoutInfo, start, end ); //layout.initTableLayout( context.getUnresolvedRowHint( tableContent ) ); } currentContext.maxAvaHeight = currentContext.root.getContentHeight( ) - getBottomBorderWidth( ); addCaption( tableContent.getCaption( ) ); repeatHeader(); } protected void closeLayout( ) { super.closeLayout( ); if ( PropertyUtil.isInlineElement( tableContent ) && parent != null ) { parent.gotoFirstPage( ); } } protected void closeLayout( ContainerContext currentContext, int index, boolean finished ) { if(currentContext.root==null || currentContext.root.getChildrenCount()==0) { return; } /* * 1. resolve all unresolved cell 2. resolve table bottom border 3. * update height of Root area 4. 
update the status of TableAreaLayout */ TableContext tableContext = (TableContext)currentContext; int borderHeight = 0; if ( tableContext.layout != null ) { int height = tableContext.layout.resolveAll( ); if ( 0 != height) { currentContext.currentBP = currentContext.currentBP + height; } borderHeight = tableContext.layout.resolveBottomBorder( ); tableContext.layout.remove( (TableArea) currentContext.root ); } currentContext.root.setHeight( currentContext.currentBP + getOffsetY( ) + borderHeight ); parent.addToRoot( currentContext.root, index ); regionLayout = null; } private int getBottomBorderWidth( ) { IStyle style = currentContext.root.getContent( ).getComputedStyle( ); int borderHeight = PropertyUtil.getDimensionValue( style .getProperty( StyleConstants.STYLE_BORDER_BOTTOM_WIDTH ) ); return borderHeight; } public int getColumnNumber( ) { return columnNumber; } /** * resolve cell border conflict * * @param cellArea */ public void resolveBorderConflict( CellArea cellArea, boolean isFirst ) { if ( tableContext.layout != null ) { tableContext.layout.resolveBorderConflict( cellArea, isFirst ); } } private class ColumnWidthResolver { ITableContent table; public ColumnWidthResolver( ITableContent table ) { this.table = table; } /** * Calculates the column width for the table. the return value should be * each column width in point. * * @param columns * The column width specified in report design. * @param tableWidth * The suggested table width. If isTableWidthDefined is true, * this value is user defined table width; otherwise, it is * the max possible width for the table. * @param isTableWidthDefined * The flag to indicate whether the table width has been * defined explicitly. * @return each column width in point. 
*/ protected int[] formalize( DimensionType[] columns, int tableWidth, boolean isTableWidthDefined ) { ArrayList percentageList = new ArrayList( ); ArrayList unsetList = new ArrayList( ); ArrayList preFixedList = new ArrayList( ); int[] resolvedColumnWidth = new int[columns.length]; double total = 0.0f; int fixedLength = 0; for ( int i = 0; i < columns.length; i++ ) { if ( columns[i] == null ) { unsetList.add( new Integer( i ) ); } else if ( EngineIRConstants.UNITS_PERCENTAGE.equals( columns[i] .getUnits( ) ) ) { percentageList.add( new Integer( i ) ); total += columns[i].getMeasure( ); } else if ( EngineIRConstants.UNITS_EM.equals( columns[i] .getUnits( ) ) || EngineIRConstants.UNITS_EX.equals( columns[i] .getUnits( ) ) ) { int len = TableLayout.this.getDimensionValue( columns[i], PropertyUtil.getDimensionValue( table .getComputedStyle( ).getProperty( StyleConstants.STYLE_FONT_SIZE ) ) ); resolvedColumnWidth[i] = len; fixedLength += len; } else { int len = TableLayout.this.getDimensionValue( columns[i], tableWidth ); resolvedColumnWidth[i] = len; preFixedList.add( new Integer( i ) ); fixedLength += len; } } // all the columns have fixed width. 
if ( !isTableWidthDefined && unsetList.isEmpty( ) && percentageList.isEmpty( ) ) { return resolvedColumnWidth; } if ( fixedLength >= tableWidth ) { for ( int i = 0; i < unsetList.size( ); i++ ) { Integer index = (Integer) unsetList.get( i ); resolvedColumnWidth[index.intValue( )] = 0; } for ( int i = 0; i < percentageList.size( ); i++ ) { Integer index = (Integer) percentageList.get( i ); resolvedColumnWidth[index.intValue( )] = 0; } return resolvedColumnWidth; } if ( unsetList.isEmpty( ) ) { if ( percentageList.isEmpty( ) ) { int left = tableWidth - fixedLength; - int delta = left / preFixedList.size( ); - for ( int i = 0; i < preFixedList.size( ); i++ ) + if ( !preFixedList.isEmpty( ) ) { - Integer index = (Integer) preFixedList.get( i ); - resolvedColumnWidth[index.intValue( )] += delta; + int delta = left / preFixedList.size( ); + for ( int i = 0; i < preFixedList.size( ); i++ ) + { + Integer index = (Integer) preFixedList.get( i ); + resolvedColumnWidth[index.intValue( )] += delta; + } } } else { float leftPercentage = ( ( (float) ( tableWidth - fixedLength ) ) / tableWidth ) * 100.0f; double ratio = leftPercentage / total; for ( int i = 0; i < percentageList.size( ); i++ ) { Integer index = (Integer) percentageList.get( i ); columns[index.intValue( )] = new DimensionType( columns[index.intValue( )].getMeasure( ) * ratio, columns[index.intValue( )] .getUnits( ) ); resolvedColumnWidth[index.intValue( )] = TableLayout.this .getDimensionValue( columns[index.intValue( )], tableWidth ); } } } // unsetList is not empty. 
else { if ( percentageList.isEmpty( ) ) { int left = tableWidth - fixedLength; int eachWidth = left / unsetList.size( ); for ( int i = 0; i < unsetList.size( ); i++ ) { Integer index = (Integer) unsetList.get( i ); resolvedColumnWidth[index.intValue( )] = eachWidth; } } else { float leftPercentage = ( ( (float) ( tableWidth - fixedLength ) ) / tableWidth ) * 100.0f; if ( leftPercentage <= total ) { double ratio = leftPercentage / total; for ( int i = 0; i < unsetList.size( ); i++ ) { Integer index = (Integer) unsetList.get( i ); resolvedColumnWidth[index.intValue( )] = 0; } for ( int i = 0; i < percentageList.size( ); i++ ) { Integer index = (Integer) percentageList.get( i ); columns[index.intValue( )] = new DimensionType( columns[index.intValue( )].getMeasure( ) * ratio, columns[index.intValue( )] .getUnits( ) ); resolvedColumnWidth[index.intValue( )] = TableLayout.this .getDimensionValue( columns[index .intValue( )], tableWidth ); } } else { int usedLength = fixedLength; for ( int i = 0; i < percentageList.size( ); i++ ) { Integer index = (Integer) percentageList.get( i ); int width = TableLayout.this.getDimensionValue( columns[index.intValue( )], tableWidth ); usedLength += width; resolvedColumnWidth[index.intValue( )] = width; } int left = tableWidth - usedLength; int eachWidth = left / unsetList.size( ); for ( int i = 0; i < unsetList.size( ); i++ ) { Integer index = (Integer) unsetList.get( i ); resolvedColumnWidth[index.intValue( )] = eachWidth; } } } } return resolvedColumnWidth; } public int[] resolveFixedLayout(int maxWidth) { int columnNumber = table.getColumnCount( ); DimensionType[] columns = new DimensionType[columnNumber]; //handle visibility for(int i=0; i<columnNumber; i++) { IColumn column = table.getColumn( i ); DimensionType w = column.getWidth(); if ( startCol < 0 ) { startCol = i; } endCol = i; if(w==null) { columns[i] = null; } else { columns[i] = new DimensionType(w.getMeasure(), w.getUnits()); } } if ( startCol < 0 ) startCol = 0; if ( 
endCol < 0 ) endCol = 0; boolean isTableWidthDefined = false; int specifiedWidth = getDimensionValue( tableContent.getWidth( ), maxWidth ); int tableWidth; if(specifiedWidth>0) { tableWidth = specifiedWidth; isTableWidthDefined = true; } else { tableWidth = maxWidth; isTableWidthDefined = false; } return formalize(columns, tableWidth, isTableWidthDefined ); } public int[] resolve( int specifiedWidth, int maxWidth ) { assert ( specifiedWidth <= maxWidth ); int columnNumber = table.getColumnCount( ); int[] columns = new int[columnNumber]; int columnWithWidth = 0; int colSum = 0; for ( int j = 0; j < table.getColumnCount( ); j++ ) { IColumn column = table.getColumn( j ); int columnWidth = getDimensionValue( column.getWidth( ), tableWidth ); if ( columnWidth > 0 ) { columns[j] = columnWidth; colSum += columnWidth; columnWithWidth++; } else { columns[j] = -1; } } if ( columnWithWidth == columnNumber ) { if ( colSum <= maxWidth ) { return columns; } else { float delta = colSum - maxWidth; for ( int i = 0; i < columnNumber; i++ ) { columns[i] -= (int) ( delta * columns[i] / colSum ); } return columns; } } else { if ( specifiedWidth == 0 ) { if ( colSum < maxWidth ) { distributeLeftWidth( columns, ( maxWidth - colSum ) / ( columnNumber - columnWithWidth ) ); } else { redistributeWidth( columns, colSum - maxWidth + ( columnNumber - columnWithWidth ) * maxWidth / columnNumber, maxWidth, colSum ); } } else { if ( colSum < specifiedWidth ) { distributeLeftWidth( columns, ( specifiedWidth - colSum ) / ( columnNumber - columnWithWidth ) ); } else { if ( colSum < maxWidth ) { distributeLeftWidth( columns, ( maxWidth - colSum ) / ( columnNumber - columnWithWidth ) ); } else { redistributeWidth( columns, colSum - specifiedWidth + ( columnNumber - columnWithWidth ) * specifiedWidth / columnNumber, specifiedWidth, colSum ); } } } } return columns; } private void redistributeWidth( int cols[], int delta, int sum, int currentSum ) { int avaWidth = sum / cols.length; for ( int i = 0; i 
< cols.length; i++ ) { if ( cols[i] < 0 ) { cols[i] = avaWidth; } else { cols[i] -= (int) ( ( (float) cols[i] ) * delta / currentSum ); } } } private void distributeLeftWidth( int cols[], int avaWidth ) { for ( int i = 0; i < cols.length; i++ ) { if ( cols[i] < 0 ) { cols[i] = avaWidth; } } } } private TableLayoutInfo resolveTableFixedLayout(TableArea area) { assert(parent!=null); int parentMaxWidth = parent.currentContext.maxAvaWidth; IStyle style = area.getStyle( ); int marginWidth = getDimensionValue( style .getProperty( StyleConstants.STYLE_MARGIN_LEFT ) ) + getDimensionValue( style .getProperty( StyleConstants.STYLE_MARGIN_RIGHT ) ); return new TableLayoutInfo( columnWidthResolver.resolveFixedLayout( parentMaxWidth - marginWidth ) ); } private TableLayoutInfo resolveTableLayoutInfo( TableArea area ) { assert ( parent != null ); int avaWidth = parent.getCurrentMaxContentWidth( ) - parent.currentContext.currentIP; int parentMaxWidth = parent.getCurrentMaxContentWidth( ); IStyle style = area.getStyle( ); int marginWidth = getDimensionValue( style .getProperty( StyleConstants.STYLE_MARGIN_LEFT ) ) + getDimensionValue( style .getProperty( StyleConstants.STYLE_MARGIN_RIGHT ) ); int specifiedWidth = getDimensionValue( tableContent.getWidth( ), parentMaxWidth ); if ( specifiedWidth + marginWidth > parentMaxWidth ) { specifiedWidth = 0; } boolean isInline = PropertyUtil.isInlineElement( content ); if ( specifiedWidth == 0 ) { if ( isInline ) { if ( avaWidth - marginWidth > parentMaxWidth / 4 ) { tableWidth = avaWidth - marginWidth; } else { tableWidth = parentMaxWidth - marginWidth; } } else { tableWidth = avaWidth - marginWidth; } return new TableLayoutInfo( columnWidthResolver.resolve( tableWidth, tableWidth ) ) ; } else { if ( !isInline ) { tableWidth = Math.min( specifiedWidth, avaWidth - marginWidth ); return new TableLayoutInfo( columnWidthResolver.resolve( tableWidth, avaWidth - marginWidth ) ) ; } else { tableWidth = Math.min( specifiedWidth, parentMaxWidth - 
marginWidth ); return new TableLayoutInfo( columnWidthResolver.resolve( tableWidth, parentMaxWidth - marginWidth ) ) ; } } } /** * update row height * * @param row */ public void updateRow( RowArea row, int specifiedHeight, int index ) { tableContext = (TableContext)contextList.get(index); if ( tableContext.layout != null ) { tableContext.layout.updateRow( row, specifiedHeight ); } } public void addRow( RowArea row, int index ) { tableContext = (TableContext)contextList.get(index); if ( tableContext.layout != null ) { tableContext.layout.addRow( row ); } } public int getXPos( int columnID ) { if ( layoutInfo != null ) { return layoutInfo.getXPosition( columnID ); } return 0; } public int getCellWidth( int startColumn, int endColumn ) { if ( layoutInfo != null ) { return layoutInfo.getCellWidth( startColumn, endColumn ); } return 0; } public TableRegionLayout getTableRegionLayout() { if(regionLayout==null) { regionLayout = new TableAreaLayout( tableContent, layoutInfo, startCol, endCol ); } return new TableRegionLayout(context, tableContent, layoutInfo, regionLayout); } protected IContent generateCaptionRow(String caption) { IReportContent report = tableContent.getReportContent( ); ILabelContent captionLabel = report.createLabelContent( ); captionLabel.setText( caption ); captionLabel.getStyle( ).setProperty( IStyle.STYLE_TEXT_ALIGN, IStyle.CENTER_VALUE ); ICellContent cell = report.createCellContent( ); cell.setColSpan( tableContent.getColumnCount( ) ); cell.setRowSpan( 1 ); cell.setColumn( 0 ); cell.getStyle( ).setProperty( IStyle.STYLE_BORDER_TOP_STYLE, IStyle.HIDDEN_VALUE ); cell.getStyle( ).setProperty( IStyle.STYLE_BORDER_BOTTOM_STYLE, IStyle.HIDDEN_VALUE ); cell.getStyle( ).setProperty( IStyle.STYLE_BORDER_LEFT_STYLE, IStyle.HIDDEN_VALUE ); cell.getStyle( ).setProperty( IStyle.STYLE_BORDER_RIGHT_STYLE, IStyle.HIDDEN_VALUE ); captionLabel.setParent( cell ); cell.getChildren( ).add( captionLabel ); IRowContent row = report.createRowContent( ); row.getChildren( 
).add( cell ); cell.setParent( row ); row.setParent( tableContent ); return row; } protected void repeatHeader() { if ( bandStatus == IBandContent.BAND_HEADER ) { return; } ITableBandContent header = (ITableBandContent) tableContent.getHeader( ); if ( !tableContent.isHeaderRepeat( ) || header == null ) { return; } if ( header.getChildren( ).isEmpty( ) ) { return; } TableRegionLayout rLayout = getTableRegionLayout(); rLayout.initialize( header ); rLayout.layout( ); TableArea tableRegion = (TableArea) header .getExtension( IContent.LAYOUT_EXTENSION ); if ( tableRegion != null && tableRegion.getAllocatedHeight( ) < getCurrentMaxContentHeight( ) ) { //add to layout Iterator iter = tableRegion.getChildren(); TableContext tableContext = (TableContext)contextList.getLast(); while ( iter.hasNext( ) ) { ContainerArea area = (ContainerArea) iter.next( ); Iterator rowIter = area.getChildren(); while(rowIter.hasNext()) { AbstractArea row = (AbstractArea) rowIter.next( ); if(row instanceof RowArea) { tableContext.layout.addRow( (RowArea)row ); } } } // add to root iter = tableRegion.getChildren( ); while ( iter.hasNext( ) ) { AbstractArea area = (AbstractArea) iter.next( ); addArea( area ); } } content.setExtension( IContent.LAYOUT_EXTENSION, null ); } protected void addCaption( String caption ) { if ( caption == null || "".equals( caption ) ) //$NON-NLS-1$ { return; } TableRegionLayout rLayout = getTableRegionLayout(); IContent row = generateCaptionRow(tableContent.getCaption( )); rLayout.initialize( row ); rLayout.layout( ); TableArea tableRegion = (TableArea) row .getExtension( IContent.LAYOUT_EXTENSION ); if ( tableRegion != null ) { // add to root Iterator iter = tableRegion.getChildren( ); while ( iter.hasNext( ) ) { RowArea rowArea = (RowArea) iter.next( ); addArea( rowArea ); } } row.setExtension( IContent.LAYOUT_EXTENSION, null ); regionLayout = null; } public class TableLayoutInfo { public TableLayoutInfo( int[] colWidth ) { this.colWidth = colWidth; this.columnNumber 
= colWidth.length; this.xPositions = new int[columnNumber]; this.tableWidth = 0; if ( tableContent.isRTL( ) ) // bidi_hcg { for ( int i = 0; i < columnNumber; i++ ) { xPositions[i] = parent.getCurrentMaxContentWidth( ) - tableWidth - colWidth[i]; tableWidth += colWidth[i]; } } else // ltr { for ( int i = 0; i < columnNumber; i++ ) { xPositions[i] = tableWidth; tableWidth += colWidth[i]; } } } public int getTableWidth( ) { return this.tableWidth; } public int getXPosition( int index ) { return xPositions[index]; } /** * get cell width * * @param startColumn * @param endColumn * @return */ public int getCellWidth( int startColumn, int endColumn ) { assert ( startColumn < endColumn ); assert ( colWidth != null ); int sum = 0; for ( int i = startColumn; i < endColumn; i++ ) { sum += colWidth[i]; } return sum; } protected int columnNumber; protected int tableWidth; /** * Array of column width */ protected int[] colWidth = null; /** * array of position for each column */ protected int[] xPositions = null; } public boolean addArea( AbstractArea area ) { return super.addArea( area ); } class TableContext extends ContainerContext { TableAreaLayout layout; } }
false
true
protected int[] formalize( DimensionType[] columns, int tableWidth, boolean isTableWidthDefined ) { ArrayList percentageList = new ArrayList( ); ArrayList unsetList = new ArrayList( ); ArrayList preFixedList = new ArrayList( ); int[] resolvedColumnWidth = new int[columns.length]; double total = 0.0f; int fixedLength = 0; for ( int i = 0; i < columns.length; i++ ) { if ( columns[i] == null ) { unsetList.add( new Integer( i ) ); } else if ( EngineIRConstants.UNITS_PERCENTAGE.equals( columns[i] .getUnits( ) ) ) { percentageList.add( new Integer( i ) ); total += columns[i].getMeasure( ); } else if ( EngineIRConstants.UNITS_EM.equals( columns[i] .getUnits( ) ) || EngineIRConstants.UNITS_EX.equals( columns[i] .getUnits( ) ) ) { int len = TableLayout.this.getDimensionValue( columns[i], PropertyUtil.getDimensionValue( table .getComputedStyle( ).getProperty( StyleConstants.STYLE_FONT_SIZE ) ) ); resolvedColumnWidth[i] = len; fixedLength += len; } else { int len = TableLayout.this.getDimensionValue( columns[i], tableWidth ); resolvedColumnWidth[i] = len; preFixedList.add( new Integer( i ) ); fixedLength += len; } } // all the columns have fixed width. 
if ( !isTableWidthDefined && unsetList.isEmpty( ) && percentageList.isEmpty( ) ) { return resolvedColumnWidth; } if ( fixedLength >= tableWidth ) { for ( int i = 0; i < unsetList.size( ); i++ ) { Integer index = (Integer) unsetList.get( i ); resolvedColumnWidth[index.intValue( )] = 0; } for ( int i = 0; i < percentageList.size( ); i++ ) { Integer index = (Integer) percentageList.get( i ); resolvedColumnWidth[index.intValue( )] = 0; } return resolvedColumnWidth; } if ( unsetList.isEmpty( ) ) { if ( percentageList.isEmpty( ) ) { int left = tableWidth - fixedLength; int delta = left / preFixedList.size( ); for ( int i = 0; i < preFixedList.size( ); i++ ) { Integer index = (Integer) preFixedList.get( i ); resolvedColumnWidth[index.intValue( )] += delta; } } else { float leftPercentage = ( ( (float) ( tableWidth - fixedLength ) ) / tableWidth ) * 100.0f; double ratio = leftPercentage / total; for ( int i = 0; i < percentageList.size( ); i++ ) { Integer index = (Integer) percentageList.get( i ); columns[index.intValue( )] = new DimensionType( columns[index.intValue( )].getMeasure( ) * ratio, columns[index.intValue( )] .getUnits( ) ); resolvedColumnWidth[index.intValue( )] = TableLayout.this .getDimensionValue( columns[index.intValue( )], tableWidth ); } } } // unsetList is not empty. 
else { if ( percentageList.isEmpty( ) ) { int left = tableWidth - fixedLength; int eachWidth = left / unsetList.size( ); for ( int i = 0; i < unsetList.size( ); i++ ) { Integer index = (Integer) unsetList.get( i ); resolvedColumnWidth[index.intValue( )] = eachWidth; } } else { float leftPercentage = ( ( (float) ( tableWidth - fixedLength ) ) / tableWidth ) * 100.0f; if ( leftPercentage <= total ) { double ratio = leftPercentage / total; for ( int i = 0; i < unsetList.size( ); i++ ) { Integer index = (Integer) unsetList.get( i ); resolvedColumnWidth[index.intValue( )] = 0; } for ( int i = 0; i < percentageList.size( ); i++ ) { Integer index = (Integer) percentageList.get( i ); columns[index.intValue( )] = new DimensionType( columns[index.intValue( )].getMeasure( ) * ratio, columns[index.intValue( )] .getUnits( ) ); resolvedColumnWidth[index.intValue( )] = TableLayout.this .getDimensionValue( columns[index .intValue( )], tableWidth ); } } else { int usedLength = fixedLength; for ( int i = 0; i < percentageList.size( ); i++ ) { Integer index = (Integer) percentageList.get( i ); int width = TableLayout.this.getDimensionValue( columns[index.intValue( )], tableWidth ); usedLength += width; resolvedColumnWidth[index.intValue( )] = width; } int left = tableWidth - usedLength; int eachWidth = left / unsetList.size( ); for ( int i = 0; i < unsetList.size( ); i++ ) { Integer index = (Integer) unsetList.get( i ); resolvedColumnWidth[index.intValue( )] = eachWidth; } } } } return resolvedColumnWidth; }
protected int[] formalize( DimensionType[] columns, int tableWidth, boolean isTableWidthDefined ) { ArrayList percentageList = new ArrayList( ); ArrayList unsetList = new ArrayList( ); ArrayList preFixedList = new ArrayList( ); int[] resolvedColumnWidth = new int[columns.length]; double total = 0.0f; int fixedLength = 0; for ( int i = 0; i < columns.length; i++ ) { if ( columns[i] == null ) { unsetList.add( new Integer( i ) ); } else if ( EngineIRConstants.UNITS_PERCENTAGE.equals( columns[i] .getUnits( ) ) ) { percentageList.add( new Integer( i ) ); total += columns[i].getMeasure( ); } else if ( EngineIRConstants.UNITS_EM.equals( columns[i] .getUnits( ) ) || EngineIRConstants.UNITS_EX.equals( columns[i] .getUnits( ) ) ) { int len = TableLayout.this.getDimensionValue( columns[i], PropertyUtil.getDimensionValue( table .getComputedStyle( ).getProperty( StyleConstants.STYLE_FONT_SIZE ) ) ); resolvedColumnWidth[i] = len; fixedLength += len; } else { int len = TableLayout.this.getDimensionValue( columns[i], tableWidth ); resolvedColumnWidth[i] = len; preFixedList.add( new Integer( i ) ); fixedLength += len; } } // all the columns have fixed width. 
if ( !isTableWidthDefined && unsetList.isEmpty( ) && percentageList.isEmpty( ) ) { return resolvedColumnWidth; } if ( fixedLength >= tableWidth ) { for ( int i = 0; i < unsetList.size( ); i++ ) { Integer index = (Integer) unsetList.get( i ); resolvedColumnWidth[index.intValue( )] = 0; } for ( int i = 0; i < percentageList.size( ); i++ ) { Integer index = (Integer) percentageList.get( i ); resolvedColumnWidth[index.intValue( )] = 0; } return resolvedColumnWidth; } if ( unsetList.isEmpty( ) ) { if ( percentageList.isEmpty( ) ) { int left = tableWidth - fixedLength; if ( !preFixedList.isEmpty( ) ) { int delta = left / preFixedList.size( ); for ( int i = 0; i < preFixedList.size( ); i++ ) { Integer index = (Integer) preFixedList.get( i ); resolvedColumnWidth[index.intValue( )] += delta; } } } else { float leftPercentage = ( ( (float) ( tableWidth - fixedLength ) ) / tableWidth ) * 100.0f; double ratio = leftPercentage / total; for ( int i = 0; i < percentageList.size( ); i++ ) { Integer index = (Integer) percentageList.get( i ); columns[index.intValue( )] = new DimensionType( columns[index.intValue( )].getMeasure( ) * ratio, columns[index.intValue( )] .getUnits( ) ); resolvedColumnWidth[index.intValue( )] = TableLayout.this .getDimensionValue( columns[index.intValue( )], tableWidth ); } } } // unsetList is not empty. 
else { if ( percentageList.isEmpty( ) ) { int left = tableWidth - fixedLength; int eachWidth = left / unsetList.size( ); for ( int i = 0; i < unsetList.size( ); i++ ) { Integer index = (Integer) unsetList.get( i ); resolvedColumnWidth[index.intValue( )] = eachWidth; } } else { float leftPercentage = ( ( (float) ( tableWidth - fixedLength ) ) / tableWidth ) * 100.0f; if ( leftPercentage <= total ) { double ratio = leftPercentage / total; for ( int i = 0; i < unsetList.size( ); i++ ) { Integer index = (Integer) unsetList.get( i ); resolvedColumnWidth[index.intValue( )] = 0; } for ( int i = 0; i < percentageList.size( ); i++ ) { Integer index = (Integer) percentageList.get( i ); columns[index.intValue( )] = new DimensionType( columns[index.intValue( )].getMeasure( ) * ratio, columns[index.intValue( )] .getUnits( ) ); resolvedColumnWidth[index.intValue( )] = TableLayout.this .getDimensionValue( columns[index .intValue( )], tableWidth ); } } else { int usedLength = fixedLength; for ( int i = 0; i < percentageList.size( ); i++ ) { Integer index = (Integer) percentageList.get( i ); int width = TableLayout.this.getDimensionValue( columns[index.intValue( )], tableWidth ); usedLength += width; resolvedColumnWidth[index.intValue( )] = width; } int left = tableWidth - usedLength; int eachWidth = left / unsetList.size( ); for ( int i = 0; i < unsetList.size( ); i++ ) { Integer index = (Integer) unsetList.get( i ); resolvedColumnWidth[index.intValue( )] = eachWidth; } } } } return resolvedColumnWidth; }
diff --git a/src/main/java/org/elasticsearch/gateway/local/state/meta/LocalGatewayMetaState.java b/src/main/java/org/elasticsearch/gateway/local/state/meta/LocalGatewayMetaState.java index 08573905e80..6401b362316 100644 --- a/src/main/java/org/elasticsearch/gateway/local/state/meta/LocalGatewayMetaState.java +++ b/src/main/java/org/elasticsearch/gateway/local/state/meta/LocalGatewayMetaState.java @@ -1,689 +1,689 @@ /* * Licensed to ElasticSearch and Shay Banon under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. ElasticSearch licenses this * file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. 
*/ package org.elasticsearch.gateway.local.state.meta; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.io.Closeables; import org.elasticsearch.ElasticSearchIllegalArgumentException; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.CachedStreamOutput; import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.xcontent.*; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; import org.elasticsearch.threadpool.ThreadPool; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ScheduledFuture; /** * */ public class LocalGatewayMetaState extends AbstractComponent implements ClusterStateListener { static enum AutoImportDangledState { NO() { @Override public boolean shouldImport() { return false; } }, YES() { @Override public boolean shouldImport() { return true; } }, CLOSED() { @Override public boolean shouldImport() { return true; } }; public abstract boolean shouldImport(); public static AutoImportDangledState fromString(String value) { if 
("no".equalsIgnoreCase(value)) { return NO; } else if ("yes".equalsIgnoreCase(value)) { return YES; } else if ("closed".equalsIgnoreCase(value)) { return CLOSED; } else { throw new ElasticSearchIllegalArgumentException("failed to parse [" + value + "], not a valid auto dangling import type"); } } } private final NodeEnvironment nodeEnv; private final ThreadPool threadPool; private final LocalAllocateDangledIndices allocateDangledIndices; @Nullable private volatile MetaData currentMetaData; private final XContentType format; private final ToXContent.Params formatParams; private final AutoImportDangledState autoImportDangled; private final TimeValue danglingTimeout; private final Map<String, DanglingIndex> danglingIndices = ConcurrentCollections.newConcurrentMap(); private final Object danglingMutex = new Object(); @Inject public LocalGatewayMetaState(Settings settings, ThreadPool threadPool, NodeEnvironment nodeEnv, TransportNodesListGatewayMetaState nodesListGatewayMetaState, LocalAllocateDangledIndices allocateDangledIndices) throws Exception { super(settings); this.nodeEnv = nodeEnv; this.threadPool = threadPool; this.format = XContentType.fromRestContentType(settings.get("format", "smile")); this.allocateDangledIndices = allocateDangledIndices; nodesListGatewayMetaState.init(this); if (this.format == XContentType.SMILE) { Map<String, String> params = Maps.newHashMap(); params.put("binary", "true"); formatParams = new ToXContent.MapParams(params); } else { formatParams = ToXContent.EMPTY_PARAMS; } this.autoImportDangled = AutoImportDangledState.fromString(settings.get("gateway.local.auto_import_dangled", AutoImportDangledState.YES.toString())); this.danglingTimeout = settings.getAsTime("gateway.local.dangling_timeout", TimeValue.timeValueHours(2)); logger.debug("using gateway.local.auto_import_dangled [{}], with gateway.local.dangling_timeout [{}]", this.autoImportDangled, this.danglingTimeout); if (DiscoveryNode.masterNode(settings)) { try { pre019Upgrade(); 
long start = System.currentTimeMillis(); loadState(); logger.debug("took {} to load state", TimeValue.timeValueMillis(System.currentTimeMillis() - start)); } catch (Exception e) { logger.error("failed to read local state, exiting...", e); throw e; } } } public MetaData loadMetaState() throws Exception { return loadState(); } public boolean isDangling(String index) { return danglingIndices.containsKey(index); } @Override public void clusterChanged(ClusterChangedEvent event) { if (event.state().blocks().disableStatePersistence()) { // reset the current metadata, we need to start fresh... this.currentMetaData = null; return; } MetaData newMetaData = event.state().metaData(); // we don't check if metaData changed, since we might be called several times and we need to check dangling... boolean success = true; // only applied to master node, writing the global and index level states if (event.state().nodes().localNode().masterNode()) { // check if the global state changed? if (currentMetaData == null || !MetaData.isGlobalStateEquals(currentMetaData, newMetaData)) { try { writeGlobalState("changed", newMetaData, currentMetaData); } catch (Exception e) { success = false; } } // check and write changes in indices for (IndexMetaData indexMetaData : newMetaData) { String writeReason = null; IndexMetaData currentIndexMetaData; if (currentMetaData == null) { // a new event..., check from the state stored currentIndexMetaData = loadIndex(indexMetaData.index()); } else { currentIndexMetaData = currentMetaData.index(indexMetaData.index()); } if (currentIndexMetaData == null) { writeReason = "freshly created"; } else if (currentIndexMetaData.version() != indexMetaData.version()) { writeReason = "version changed from [" + currentIndexMetaData.version() + "] to [" + indexMetaData.version() + "]"; } // we update the writeReason only if we really need to write it if (writeReason == null) { continue; } try { writeIndex(writeReason, indexMetaData, currentIndexMetaData); } catch 
(Exception e) { success = false; } } } // delete indices that were there before, but are deleted now // we need to do it so they won't be detected as dangling if (nodeEnv.hasNodeFile()) { if (currentMetaData != null) { // only delete indices when we already received a state (currentMetaData != null) // and we had a go at processing dangling indices at least once // this will also delete the _state of the index itself for (IndexMetaData current : currentMetaData) { if (danglingIndices.containsKey(current.index())) { continue; } if (!newMetaData.hasIndex(current.index())) { - logger.debug("[{}] deleting index that is no longer part of the metadata"); + logger.debug("[{}] deleting index that is no longer part of the metadata (indices: [{}])", current.index(), newMetaData.indices().keySet()); FileSystemUtils.deleteRecursively(nodeEnv.indexLocations(new Index(current.index()))); } } } } // handle dangling indices, we handle those for all nodes that have a node file (data or master) if (nodeEnv.hasNodeFile()) { if (danglingTimeout.millis() >= 0) { synchronized (danglingMutex) { for (String danglingIndex : danglingIndices.keySet()) { if (newMetaData.hasIndex(danglingIndex)) { logger.debug("[{}] no longer dangling (created), removing", danglingIndex); DanglingIndex removed = danglingIndices.remove(danglingIndex); removed.future.cancel(false); } } // delete indices that are no longer part of the metadata try { for (String indexName : nodeEnv.findAllIndices()) { // if we have the index on the metadata, don't delete it if (newMetaData.hasIndex(indexName)) { continue; } if (danglingIndices.containsKey(indexName)) { // already dangling, continue continue; } IndexMetaData indexMetaData = loadIndex(indexName); if (indexMetaData != null) { if (danglingTimeout.millis() == 0) { logger.info("[{}] dangling index, exists on local file system, but not in cluster metadata, timeout set to 0, deleting now", indexName); FileSystemUtils.deleteRecursively(nodeEnv.indexLocations(new 
Index(indexName))); } else { logger.info("[{}] dangling index, exists on local file system, but not in cluster metadata, scheduling to delete in [{}], auto import to cluster state [{}]", indexName, danglingTimeout, autoImportDangled); danglingIndices.put(indexName, new DanglingIndex(indexName, threadPool.schedule(danglingTimeout, ThreadPool.Names.SAME, new RemoveDanglingIndex(indexName)))); } } } } catch (Exception e) { logger.warn("failed to find dangling indices", e); } } } if (autoImportDangled.shouldImport() && !danglingIndices.isEmpty()) { final List<IndexMetaData> dangled = Lists.newArrayList(); for (String indexName : danglingIndices.keySet()) { IndexMetaData indexMetaData = loadIndex(indexName); if (indexMetaData == null) { logger.debug("failed to find state for dangling index [{}]", indexName); continue; } // we might have someone copying over an index, renaming the directory, handle that if (!indexMetaData.index().equals(indexName)) { logger.info("dangled index directory name is [{}], state name is [{}], renaming to directory name", indexName, indexMetaData.index()); indexMetaData = IndexMetaData.newIndexMetaDataBuilder(indexMetaData).index(indexName).build(); } if (autoImportDangled == AutoImportDangledState.CLOSED) { indexMetaData = IndexMetaData.newIndexMetaDataBuilder(indexMetaData).state(IndexMetaData.State.CLOSE).build(); } if (indexMetaData != null) { dangled.add(indexMetaData); } } IndexMetaData[] dangledIndices = dangled.toArray(new IndexMetaData[dangled.size()]); try { allocateDangledIndices.allocateDangled(dangledIndices, new LocalAllocateDangledIndices.Listener() { @Override public void onResponse(LocalAllocateDangledIndices.AllocateDangledResponse response) { logger.trace("allocated dangled"); } @Override public void onFailure(Throwable e) { logger.info("failed to send allocated dangled", e); } }); } catch (Exception e) { logger.warn("failed to send allocate dangled", e); } } } if (success) { currentMetaData = newMetaData; } } private void 
deleteIndex(String index) { logger.trace("[{}] delete index state", index); File[] indexLocations = nodeEnv.indexLocations(new Index(index)); for (File indexLocation : indexLocations) { if (!indexLocation.exists()) { continue; } FileSystemUtils.deleteRecursively(new File(indexLocation, "_state")); } } private void writeIndex(String reason, IndexMetaData indexMetaData, @Nullable IndexMetaData previousIndexMetaData) throws Exception { logger.trace("[{}] writing state, reason [{}]", indexMetaData.index(), reason); CachedStreamOutput.Entry cachedEntry = CachedStreamOutput.popEntry(); try { XContentBuilder builder = XContentFactory.contentBuilder(format, cachedEntry.bytes()); builder.startObject(); IndexMetaData.Builder.toXContent(indexMetaData, builder, formatParams); builder.endObject(); builder.flush(); String stateFileName = "state-" + indexMetaData.version(); Exception lastFailure = null; boolean wroteAtLeastOnce = false; for (File indexLocation : nodeEnv.indexLocations(new Index(indexMetaData.index()))) { File stateLocation = new File(indexLocation, "_state"); FileSystemUtils.mkdirs(stateLocation); File stateFile = new File(stateLocation, stateFileName); FileOutputStream fos = null; try { fos = new FileOutputStream(stateFile); BytesReference bytes = cachedEntry.bytes().bytes(); fos.write(bytes.array(), bytes.arrayOffset(), bytes.length()); fos.getChannel().force(true); Closeables.closeQuietly(fos); wroteAtLeastOnce = true; } catch (Exception e) { lastFailure = e; } finally { Closeables.closeQuietly(fos); } } if (!wroteAtLeastOnce) { logger.warn("[{}]: failed to state", lastFailure, indexMetaData.index()); throw new IOException("failed to write state for [" + indexMetaData.index() + "]", lastFailure); } // delete the old files if (previousIndexMetaData != null && previousIndexMetaData.version() != indexMetaData.version()) { for (File indexLocation : nodeEnv.indexLocations(new Index(indexMetaData.index()))) { File[] files = new File(indexLocation, 
"_state").listFiles(); if (files == null) { continue; } for (File file : files) { if (!file.getName().startsWith("state-")) { continue; } if (file.getName().equals(stateFileName)) { continue; } file.delete(); } } } } finally { CachedStreamOutput.pushEntry(cachedEntry); } } private void writeGlobalState(String reason, MetaData metaData, @Nullable MetaData previousMetaData) throws Exception { logger.trace("[_global] writing state, reason [{}]", reason); // create metadata to write with just the global state MetaData globalMetaData = MetaData.builder().metaData(metaData).removeAllIndices().build(); CachedStreamOutput.Entry cachedEntry = CachedStreamOutput.popEntry(); try { XContentBuilder builder = XContentFactory.contentBuilder(format, cachedEntry.bytes()); builder.startObject(); MetaData.Builder.toXContent(globalMetaData, builder, formatParams); builder.endObject(); builder.flush(); String globalFileName = "global-" + globalMetaData.version(); Exception lastFailure = null; boolean wroteAtLeastOnce = false; for (File dataLocation : nodeEnv.nodeDataLocations()) { File stateLocation = new File(dataLocation, "_state"); FileSystemUtils.mkdirs(stateLocation); File stateFile = new File(stateLocation, globalFileName); FileOutputStream fos = null; try { fos = new FileOutputStream(stateFile); BytesReference bytes = cachedEntry.bytes().bytes(); fos.write(bytes.array(), bytes.arrayOffset(), bytes.length()); fos.getChannel().force(true); Closeables.closeQuietly(fos); wroteAtLeastOnce = true; } catch (Exception e) { lastFailure = e; } finally { Closeables.closeQuietly(fos); } } if (!wroteAtLeastOnce) { logger.warn("[_global]: failed to write global state", lastFailure); throw new IOException("failed to write global state", lastFailure); } // delete the old files for (File dataLocation : nodeEnv.nodeDataLocations()) { File[] files = new File(dataLocation, "_state").listFiles(); if (files == null) { continue; } for (File file : files) { if (!file.getName().startsWith("global-")) { 
continue; } if (file.getName().equals(globalFileName)) { continue; } file.delete(); } } } finally { CachedStreamOutput.pushEntry(cachedEntry); } } private MetaData loadState() throws Exception { MetaData.Builder metaDataBuilder = MetaData.builder(); MetaData globalMetaData = loadGlobalState(); if (globalMetaData != null) { metaDataBuilder.metaData(globalMetaData); } Set<String> indices = nodeEnv.findAllIndices(); for (String index : indices) { IndexMetaData indexMetaData = loadIndex(index); if (indexMetaData == null) { logger.debug("[{}] failed to find metadata for existing index location", index); } else { metaDataBuilder.put(indexMetaData, false); } } return metaDataBuilder.build(); } @Nullable private IndexMetaData loadIndex(String index) { long highestVersion = -1; IndexMetaData indexMetaData = null; for (File indexLocation : nodeEnv.indexLocations(new Index(index))) { File stateDir = new File(indexLocation, "_state"); if (!stateDir.exists() || !stateDir.isDirectory()) { continue; } // now, iterate over the current versions, and find latest one File[] stateFiles = stateDir.listFiles(); if (stateFiles == null) { continue; } for (File stateFile : stateFiles) { if (!stateFile.getName().startsWith("state-")) { continue; } try { long version = Long.parseLong(stateFile.getName().substring("state-".length())); if (version > highestVersion) { byte[] data = Streams.copyToByteArray(new FileInputStream(stateFile)); if (data.length == 0) { logger.debug("[{}]: no data for [" + stateFile.getAbsolutePath() + "], ignoring...", index); continue; } XContentParser parser = null; try { parser = XContentHelper.createParser(data, 0, data.length); parser.nextToken(); // move to START_OBJECT indexMetaData = IndexMetaData.Builder.fromXContent(parser); highestVersion = version; } finally { if (parser != null) { parser.close(); } } } } catch (Exception e) { logger.debug("[{}]: failed to read [" + stateFile.getAbsolutePath() + "], ignoring...", e, index); } } } return indexMetaData; } 
private MetaData loadGlobalState() { long highestVersion = -1; MetaData metaData = null; for (File dataLocation : nodeEnv.nodeDataLocations()) { File stateLocation = new File(dataLocation, "_state"); if (!stateLocation.exists()) { continue; } File[] stateFiles = stateLocation.listFiles(); if (stateFiles == null) { continue; } for (File stateFile : stateFiles) { String name = stateFile.getName(); if (!name.startsWith("global-")) { continue; } try { long version = Long.parseLong(stateFile.getName().substring("global-".length())); if (version > highestVersion) { byte[] data = Streams.copyToByteArray(new FileInputStream(stateFile)); if (data.length == 0) { logger.debug("[_global] no data for [" + stateFile.getAbsolutePath() + "], ignoring..."); continue; } XContentParser parser = null; try { parser = XContentHelper.createParser(data, 0, data.length); metaData = MetaData.Builder.fromXContent(parser); highestVersion = version; } finally { if (parser != null) { parser.close(); } } } } catch (Exception e) { logger.debug("failed to load global state from [{}]", e, stateFile.getAbsolutePath()); } } } return metaData; } private void pre019Upgrade() throws Exception { long index = -1; File metaDataFile = null; MetaData metaData = null; long version = -1; for (File dataLocation : nodeEnv.nodeDataLocations()) { File stateLocation = new File(dataLocation, "_state"); if (!stateLocation.exists()) { continue; } File[] stateFiles = stateLocation.listFiles(); if (stateFiles == null) { continue; } for (File stateFile : stateFiles) { if (logger.isTraceEnabled()) { logger.trace("[upgrade]: processing [" + stateFile.getName() + "]"); } String name = stateFile.getName(); if (!name.startsWith("metadata-")) { continue; } long fileIndex = Long.parseLong(name.substring(name.indexOf('-') + 1)); if (fileIndex >= index) { // try and read the meta data try { byte[] data = Streams.copyToByteArray(new FileInputStream(stateFile)); if (data.length == 0) { continue; } XContentParser parser = 
XContentHelper.createParser(data, 0, data.length); try { String currentFieldName = null; XContentParser.Token token = parser.nextToken(); if (token != null) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_OBJECT) { if ("meta-data".equals(currentFieldName)) { metaData = MetaData.Builder.fromXContent(parser); } } else if (token.isValue()) { if ("version".equals(currentFieldName)) { version = parser.longValue(); } } } } } finally { parser.close(); } index = fileIndex; metaDataFile = stateFile; } catch (IOException e) { logger.warn("failed to read pre 0.19 state from [" + name + "], ignoring...", e); } } } } if (metaData == null) { return; } logger.info("found old metadata state, loading metadata from [{}] and converting to new metadata location and strucutre...", metaDataFile.getAbsolutePath()); writeGlobalState("upgrade", MetaData.builder().metaData(metaData).version(version).build(), null); for (IndexMetaData indexMetaData : metaData) { IndexMetaData.Builder indexMetaDataBuilder = IndexMetaData.newIndexMetaDataBuilder(indexMetaData).version(version); // set the created version to 0.18 indexMetaDataBuilder.settings(ImmutableSettings.settingsBuilder().put(indexMetaData.settings()).put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_18_0)); writeIndex("upgrade", indexMetaDataBuilder.build(), null); } // rename shards state to backup state File backupFile = new File(metaDataFile.getParentFile(), "backup-" + metaDataFile.getName()); if (!metaDataFile.renameTo(backupFile)) { throw new IOException("failed to rename old state to backup state [" + metaDataFile.getAbsolutePath() + "]"); } // delete all other shards state files for (File dataLocation : nodeEnv.nodeDataLocations()) { File stateLocation = new File(dataLocation, "_state"); if (!stateLocation.exists()) { continue; } File[] stateFiles = 
stateLocation.listFiles(); if (stateFiles == null) { continue; } for (File stateFile : stateFiles) { String name = stateFile.getName(); if (!name.startsWith("metadata-")) { continue; } stateFile.delete(); } } logger.info("conversion to new metadata location and format done, backup create at [{}]", backupFile.getAbsolutePath()); } class RemoveDanglingIndex implements Runnable { private final String index; RemoveDanglingIndex(String index) { this.index = index; } @Override public void run() { synchronized (danglingMutex) { DanglingIndex remove = danglingIndices.remove(index); // no longer there... if (remove == null) { return; } logger.info("[{}] deleting dangling index", index); FileSystemUtils.deleteRecursively(nodeEnv.indexLocations(new Index(index))); } } } static class DanglingIndex { public final String index; public final ScheduledFuture future; DanglingIndex(String index, ScheduledFuture future) { this.index = index; this.future = future; } } }
true
true
public void clusterChanged(ClusterChangedEvent event) { if (event.state().blocks().disableStatePersistence()) { // reset the current metadata, we need to start fresh... this.currentMetaData = null; return; } MetaData newMetaData = event.state().metaData(); // we don't check if metaData changed, since we might be called several times and we need to check dangling... boolean success = true; // only applied to master node, writing the global and index level states if (event.state().nodes().localNode().masterNode()) { // check if the global state changed? if (currentMetaData == null || !MetaData.isGlobalStateEquals(currentMetaData, newMetaData)) { try { writeGlobalState("changed", newMetaData, currentMetaData); } catch (Exception e) { success = false; } } // check and write changes in indices for (IndexMetaData indexMetaData : newMetaData) { String writeReason = null; IndexMetaData currentIndexMetaData; if (currentMetaData == null) { // a new event..., check from the state stored currentIndexMetaData = loadIndex(indexMetaData.index()); } else { currentIndexMetaData = currentMetaData.index(indexMetaData.index()); } if (currentIndexMetaData == null) { writeReason = "freshly created"; } else if (currentIndexMetaData.version() != indexMetaData.version()) { writeReason = "version changed from [" + currentIndexMetaData.version() + "] to [" + indexMetaData.version() + "]"; } // we update the writeReason only if we really need to write it if (writeReason == null) { continue; } try { writeIndex(writeReason, indexMetaData, currentIndexMetaData); } catch (Exception e) { success = false; } } } // delete indices that were there before, but are deleted now // we need to do it so they won't be detected as dangling if (nodeEnv.hasNodeFile()) { if (currentMetaData != null) { // only delete indices when we already received a state (currentMetaData != null) // and we had a go at processing dangling indices at least once // this will also delete the _state of the index itself for 
(IndexMetaData current : currentMetaData) { if (danglingIndices.containsKey(current.index())) { continue; } if (!newMetaData.hasIndex(current.index())) { logger.debug("[{}] deleting index that is no longer part of the metadata"); FileSystemUtils.deleteRecursively(nodeEnv.indexLocations(new Index(current.index()))); } } } } // handle dangling indices, we handle those for all nodes that have a node file (data or master) if (nodeEnv.hasNodeFile()) { if (danglingTimeout.millis() >= 0) { synchronized (danglingMutex) { for (String danglingIndex : danglingIndices.keySet()) { if (newMetaData.hasIndex(danglingIndex)) { logger.debug("[{}] no longer dangling (created), removing", danglingIndex); DanglingIndex removed = danglingIndices.remove(danglingIndex); removed.future.cancel(false); } } // delete indices that are no longer part of the metadata try { for (String indexName : nodeEnv.findAllIndices()) { // if we have the index on the metadata, don't delete it if (newMetaData.hasIndex(indexName)) { continue; } if (danglingIndices.containsKey(indexName)) { // already dangling, continue continue; } IndexMetaData indexMetaData = loadIndex(indexName); if (indexMetaData != null) { if (danglingTimeout.millis() == 0) { logger.info("[{}] dangling index, exists on local file system, but not in cluster metadata, timeout set to 0, deleting now", indexName); FileSystemUtils.deleteRecursively(nodeEnv.indexLocations(new Index(indexName))); } else { logger.info("[{}] dangling index, exists on local file system, but not in cluster metadata, scheduling to delete in [{}], auto import to cluster state [{}]", indexName, danglingTimeout, autoImportDangled); danglingIndices.put(indexName, new DanglingIndex(indexName, threadPool.schedule(danglingTimeout, ThreadPool.Names.SAME, new RemoveDanglingIndex(indexName)))); } } } } catch (Exception e) { logger.warn("failed to find dangling indices", e); } } } if (autoImportDangled.shouldImport() && !danglingIndices.isEmpty()) { final List<IndexMetaData> 
dangled = Lists.newArrayList(); for (String indexName : danglingIndices.keySet()) { IndexMetaData indexMetaData = loadIndex(indexName); if (indexMetaData == null) { logger.debug("failed to find state for dangling index [{}]", indexName); continue; } // we might have someone copying over an index, renaming the directory, handle that if (!indexMetaData.index().equals(indexName)) { logger.info("dangled index directory name is [{}], state name is [{}], renaming to directory name", indexName, indexMetaData.index()); indexMetaData = IndexMetaData.newIndexMetaDataBuilder(indexMetaData).index(indexName).build(); } if (autoImportDangled == AutoImportDangledState.CLOSED) { indexMetaData = IndexMetaData.newIndexMetaDataBuilder(indexMetaData).state(IndexMetaData.State.CLOSE).build(); } if (indexMetaData != null) { dangled.add(indexMetaData); } } IndexMetaData[] dangledIndices = dangled.toArray(new IndexMetaData[dangled.size()]); try { allocateDangledIndices.allocateDangled(dangledIndices, new LocalAllocateDangledIndices.Listener() { @Override public void onResponse(LocalAllocateDangledIndices.AllocateDangledResponse response) { logger.trace("allocated dangled"); } @Override public void onFailure(Throwable e) { logger.info("failed to send allocated dangled", e); } }); } catch (Exception e) { logger.warn("failed to send allocate dangled", e); } } } if (success) { currentMetaData = newMetaData; } }
public void clusterChanged(ClusterChangedEvent event) { if (event.state().blocks().disableStatePersistence()) { // reset the current metadata, we need to start fresh... this.currentMetaData = null; return; } MetaData newMetaData = event.state().metaData(); // we don't check if metaData changed, since we might be called several times and we need to check dangling... boolean success = true; // only applied to master node, writing the global and index level states if (event.state().nodes().localNode().masterNode()) { // check if the global state changed? if (currentMetaData == null || !MetaData.isGlobalStateEquals(currentMetaData, newMetaData)) { try { writeGlobalState("changed", newMetaData, currentMetaData); } catch (Exception e) { success = false; } } // check and write changes in indices for (IndexMetaData indexMetaData : newMetaData) { String writeReason = null; IndexMetaData currentIndexMetaData; if (currentMetaData == null) { // a new event..., check from the state stored currentIndexMetaData = loadIndex(indexMetaData.index()); } else { currentIndexMetaData = currentMetaData.index(indexMetaData.index()); } if (currentIndexMetaData == null) { writeReason = "freshly created"; } else if (currentIndexMetaData.version() != indexMetaData.version()) { writeReason = "version changed from [" + currentIndexMetaData.version() + "] to [" + indexMetaData.version() + "]"; } // we update the writeReason only if we really need to write it if (writeReason == null) { continue; } try { writeIndex(writeReason, indexMetaData, currentIndexMetaData); } catch (Exception e) { success = false; } } } // delete indices that were there before, but are deleted now // we need to do it so they won't be detected as dangling if (nodeEnv.hasNodeFile()) { if (currentMetaData != null) { // only delete indices when we already received a state (currentMetaData != null) // and we had a go at processing dangling indices at least once // this will also delete the _state of the index itself for 
(IndexMetaData current : currentMetaData) { if (danglingIndices.containsKey(current.index())) { continue; } if (!newMetaData.hasIndex(current.index())) { logger.debug("[{}] deleting index that is no longer part of the metadata (indices: [{}])", current.index(), newMetaData.indices().keySet()); FileSystemUtils.deleteRecursively(nodeEnv.indexLocations(new Index(current.index()))); } } } } // handle dangling indices, we handle those for all nodes that have a node file (data or master) if (nodeEnv.hasNodeFile()) { if (danglingTimeout.millis() >= 0) { synchronized (danglingMutex) { for (String danglingIndex : danglingIndices.keySet()) { if (newMetaData.hasIndex(danglingIndex)) { logger.debug("[{}] no longer dangling (created), removing", danglingIndex); DanglingIndex removed = danglingIndices.remove(danglingIndex); removed.future.cancel(false); } } // delete indices that are no longer part of the metadata try { for (String indexName : nodeEnv.findAllIndices()) { // if we have the index on the metadata, don't delete it if (newMetaData.hasIndex(indexName)) { continue; } if (danglingIndices.containsKey(indexName)) { // already dangling, continue continue; } IndexMetaData indexMetaData = loadIndex(indexName); if (indexMetaData != null) { if (danglingTimeout.millis() == 0) { logger.info("[{}] dangling index, exists on local file system, but not in cluster metadata, timeout set to 0, deleting now", indexName); FileSystemUtils.deleteRecursively(nodeEnv.indexLocations(new Index(indexName))); } else { logger.info("[{}] dangling index, exists on local file system, but not in cluster metadata, scheduling to delete in [{}], auto import to cluster state [{}]", indexName, danglingTimeout, autoImportDangled); danglingIndices.put(indexName, new DanglingIndex(indexName, threadPool.schedule(danglingTimeout, ThreadPool.Names.SAME, new RemoveDanglingIndex(indexName)))); } } } } catch (Exception e) { logger.warn("failed to find dangling indices", e); } } } if 
(autoImportDangled.shouldImport() && !danglingIndices.isEmpty()) { final List<IndexMetaData> dangled = Lists.newArrayList(); for (String indexName : danglingIndices.keySet()) { IndexMetaData indexMetaData = loadIndex(indexName); if (indexMetaData == null) { logger.debug("failed to find state for dangling index [{}]", indexName); continue; } // we might have someone copying over an index, renaming the directory, handle that if (!indexMetaData.index().equals(indexName)) { logger.info("dangled index directory name is [{}], state name is [{}], renaming to directory name", indexName, indexMetaData.index()); indexMetaData = IndexMetaData.newIndexMetaDataBuilder(indexMetaData).index(indexName).build(); } if (autoImportDangled == AutoImportDangledState.CLOSED) { indexMetaData = IndexMetaData.newIndexMetaDataBuilder(indexMetaData).state(IndexMetaData.State.CLOSE).build(); } if (indexMetaData != null) { dangled.add(indexMetaData); } } IndexMetaData[] dangledIndices = dangled.toArray(new IndexMetaData[dangled.size()]); try { allocateDangledIndices.allocateDangled(dangledIndices, new LocalAllocateDangledIndices.Listener() { @Override public void onResponse(LocalAllocateDangledIndices.AllocateDangledResponse response) { logger.trace("allocated dangled"); } @Override public void onFailure(Throwable e) { logger.info("failed to send allocated dangled", e); } }); } catch (Exception e) { logger.warn("failed to send allocate dangled", e); } } } if (success) { currentMetaData = newMetaData; } }
diff --git a/beddit_alarm/src/ohtu/beddit/activity/MainActivity.java b/beddit_alarm/src/ohtu/beddit/activity/MainActivity.java index fed1b7c..c4eddcb 100755 --- a/beddit_alarm/src/ohtu/beddit/activity/MainActivity.java +++ b/beddit_alarm/src/ohtu/beddit/activity/MainActivity.java @@ -1,242 +1,243 @@ package ohtu.beddit.activity; import android.app.Activity; import android.app.AlertDialog; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.graphics.Color; import android.os.Bundle; import android.preference.PreferenceManager; import android.provider.Settings; import android.text.format.DateFormat; import android.view.*; import android.util.Log; import android.widget.EditText; import android.widget.LinearLayout; import android.widget.Button; import android.view.View.OnClickListener; import android.widget.Toast; import ohtu.beddit.R; import ohtu.beddit.alarm.AlarmService; import ohtu.beddit.alarm.AlarmServiceImpl; import ohtu.beddit.alarm.AlarmTimeChangedListener; import ohtu.beddit.alarm.AlarmTimePicker; import ohtu.beddit.views.timepicker.CustomTimePicker; import ohtu.beddit.io.PreferenceService; public class MainActivity extends Activity implements AlarmTimeChangedListener { private AlarmService alarmService; private AlarmTimePicker alarmTimePicker; private Button addAlarmButton; private Button deleteAlarmButton; /** Called when the alarm is first created. 
*/ @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main); setAlarmService(new AlarmServiceImpl(this)); //initialize default values for settings if called for the first time PreferenceManager.setDefaultValues(this, R.xml.prefs, false); + PreferenceManager.setDefaultValues(this, R.xml.advancedprefs, false); setUI(); // Update buttons and clock handles updateButtons(); setClockHands(); boolean debugWeb = false; String token = PreferenceService.getSettingString(this, R.string.pref_key_userToken); if (token != null){ Log.v("Token:", token); } if ( (token == null || token.equals("")) && debugWeb) { Intent myIntent = new Intent(this, AuthActivity.class); startActivityForResult(myIntent,2); } myToast = Toast.makeText(getBaseContext(), "", Toast.LENGTH_SHORT); } @Override public void onResume(){ super.onResume(); updateButtons(); updateColours(); update24HourMode(); } private void setUI() { //Set clock, buttons and listeners alarmTimePicker = (CustomTimePicker)this.findViewById(R.id.alarmTimePicker); alarmTimePicker.addAlarmTimeChangedListener(this); addAlarmButton = (Button) findViewById(R.id.setAlarmButton); addAlarmButton.setOnClickListener(new AlarmSetButtonClickListener()); deleteAlarmButton = (Button)findViewById(R.id.deleteAlarmButton); deleteAlarmButton.setOnClickListener(new AlarmDeleteButtonClickListener()); updateColours(); } private void updateColours(){ String theme = PreferenceService.getSettingString(this, R.string.pref_key_colour_theme); LinearLayout layout = (LinearLayout)findViewById(R.id.mainLayout); if(theme.equals("dark")){ layout.setBackgroundColor(Color.BLACK); alarmTimePicker.setBackgroundColor(Color.BLACK); alarmTimePicker.setForegroundColor(Color.WHITE); alarmTimePicker.setSpecialColor(Color.argb(255,255,89,0)); } else if(theme.equals("light")){ layout.setBackgroundColor(Color.WHITE); alarmTimePicker.setBackgroundColor(Color.WHITE); 
alarmTimePicker.setForegroundColor(Color.BLACK); alarmTimePicker.setSpecialColor(Color.argb(255,255,89,0)); } } private void update24HourMode(){ alarmTimePicker.set24HourMode(DateFormat.is24HourFormat(this)); } private void setClockHands() { alarmTimePicker.setHours(alarmService.getAlarmHours(this)); alarmTimePicker.setMinutes(alarmService.getAlarmMinutes(this)); alarmTimePicker.setInterval(alarmService.getAlarmInterval(this)); } @Override public void onAlarmTimeChanged(int hours, int minutes, int interval) { alarmService.changeAlarm(this, hours, minutes, interval); if (alarmService.isAlarmSet(this)) showMeTheToast(getString(R.string.toast_alarmupdated)); } private Toast myToast = null; private void showMeTheToast(final String message) { runOnUiThread(new Runnable() { @Override public void run() { myToast.setText(message); myToast.setGravity(Gravity.TOP | Gravity.RIGHT, 0, 0); myToast.show(); } }); } public class AlarmSetButtonClickListener implements OnClickListener { @Override public void onClick(View view) { alarmService.addAlarm(MainActivity.this, alarmTimePicker.getHours(), alarmTimePicker.getMinutes(), alarmTimePicker.getInterval()); MainActivity.this.updateButtons(); // Tell the user about what we did. 
showMeTheToast(getString(R.string.toast_alarmset)); } } public class AlarmDeleteButtonClickListener implements OnClickListener { @Override public void onClick(View view) { alarmService.deleteAlarm(MainActivity.this); MainActivity.this.updateButtons(); showMeTheToast(getString(R.string.toast_alarmremoved)); } } public class backButtonlisten { public void onBack(View view) { MainActivity.this.finish(); } } // Set buttons to on/off public void updateButtons(){ if (alarmService.isAlarmSet(this.getApplicationContext())){ addAlarmButton.setEnabled(false); deleteAlarmButton.setEnabled(true); } else { addAlarmButton.setEnabled(true); deleteAlarmButton.setEnabled(false); } Log.v("User Interface", "Buttons updated"); } @Override public boolean onCreateOptionsMenu(Menu menu) { MenuInflater inflater = getMenuInflater(); inflater.inflate(R.menu.options_menu, menu); MenuItem settings = menu.findItem(R.id.settings_menu_button); settings.setIntent(new Intent(this.getApplicationContext(), SettingsActivity.class)); MenuItem help = menu.findItem(R.id.help_menu_button); help.setIntent(new Intent(this.getApplicationContext(), HelpActivity.class)); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case R.id.settings_menu_button: startActivity(item.getIntent()); break; case R.id.help_menu_button: startActivity(item.getIntent()); break; } return true; } @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); switch(requestCode) { case (2) : { if (resultCode == Activity.RESULT_OK) { Log.v("MainActivity", "We got message to finish main."); this.finish(); } break; } } } // These methods are for tests public void setAlarmService(AlarmService alarmService) { this.alarmService = alarmService; } public AlarmService getAlarmService() { return alarmService; } }
true
true
public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main); setAlarmService(new AlarmServiceImpl(this)); //initialize default values for settings if called for the first time PreferenceManager.setDefaultValues(this, R.xml.prefs, false); setUI(); // Update buttons and clock handles updateButtons(); setClockHands(); boolean debugWeb = false; String token = PreferenceService.getSettingString(this, R.string.pref_key_userToken); if (token != null){ Log.v("Token:", token); } if ( (token == null || token.equals("")) && debugWeb) { Intent myIntent = new Intent(this, AuthActivity.class); startActivityForResult(myIntent,2); } myToast = Toast.makeText(getBaseContext(), "", Toast.LENGTH_SHORT); }
public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main); setAlarmService(new AlarmServiceImpl(this)); //initialize default values for settings if called for the first time PreferenceManager.setDefaultValues(this, R.xml.prefs, false); PreferenceManager.setDefaultValues(this, R.xml.advancedprefs, false); setUI(); // Update buttons and clock handles updateButtons(); setClockHands(); boolean debugWeb = false; String token = PreferenceService.getSettingString(this, R.string.pref_key_userToken); if (token != null){ Log.v("Token:", token); } if ( (token == null || token.equals("")) && debugWeb) { Intent myIntent = new Intent(this, AuthActivity.class); startActivityForResult(myIntent,2); } myToast = Toast.makeText(getBaseContext(), "", Toast.LENGTH_SHORT); }
diff --git a/src/main/java/net/lahwran/bukkit/jython/PythonCommandHandler.java b/src/main/java/net/lahwran/bukkit/jython/PythonCommandHandler.java index be4b2eb..e05f34e 100644 --- a/src/main/java/net/lahwran/bukkit/jython/PythonCommandHandler.java +++ b/src/main/java/net/lahwran/bukkit/jython/PythonCommandHandler.java @@ -1,77 +1,77 @@ package net.lahwran.bukkit.jython; import org.bukkit.command.Command; import org.bukkit.command.CommandExecutor; import org.bukkit.command.CommandSender; import org.bukkit.command.PluginCommand; import org.python.core.*; /** * Class to wrap python functions so that they can be used to handle commands * @author lahwran */ public class PythonCommandHandler implements CommandExecutor { private final PyObject func; private final String name; private int argcount = -1; /** * @param func function to handle * @param name name of command to use when registering */ public PythonCommandHandler(PyObject func, String name) { this.func = func; this.name = name; } /** * @param pythonPlugin plugin to register command to */ void register(PythonPlugin pythonPlugin) { PluginCommand command = pythonPlugin.getCommand(name); if (command == null) throw new IllegalArgumentException("Command '"+name+"' not found in plugin " + pythonPlugin.getDescription().getName()); command.setExecutor(this); } private boolean call(int argcount, CommandSender sender, Command command, String label, String[] args) { PyObject[] handlerargs; if (argcount == 4) { handlerargs = new PyObject[] { Py.java2py(sender), Py.java2py(command), Py.java2py(label), Py.java2py(args)}; } else if (argcount == 3) { handlerargs = new PyObject[] { Py.java2py(sender), Py.java2py(label), Py.java2py(args)}; } else if (argcount == 2) { handlerargs = new PyObject[] { Py.java2py(sender), Py.java2py(args)}; } else throw new IllegalArgumentException("this can't happen unless you stick your fingers in my code, which obviously you did, so howabout you undo it?"); return 
func.__call__(handlerargs).__nonzero__(); } @Override public boolean onCommand(CommandSender sender, Command command, String label, String[] args) { boolean result; if (argcount == -1) { try { result = call(4, sender, command, label, args); argcount = 4; } catch (PyException e) { //this could goof up someone ... they'll probably yell at us and eventually read this code ... fuck them - if (e.type == Py.TypeError && e.value.toString().endsWith("takes exactly 3 arguments (4 given)")) { + if (e.type == Py.TypeError && (e.value.toString().endsWith("takes exactly 3 arguments (4 given)") || e.value.toString().endsWith("takes exactly 4 arguments (5 given)"))) { result = call(3, sender, command, label, args); argcount = 3; - } else if (e.type == Py.TypeError && e.value.toString().endsWith("takes exactly 2 arguments (4 given)")) { + } else if (e.type == Py.TypeError && (e.value.toString().endsWith("takes exactly 2 arguments (4 given)") || e.value.toString().endsWith("takes exactly 3 arguments (5 given)"))) { result = call(2, sender, command, label, args); argcount = 2; } else { throw e; } } } else { result = call(argcount, sender, command, label, args); } return result; } }
false
true
public boolean onCommand(CommandSender sender, Command command, String label, String[] args) { boolean result; if (argcount == -1) { try { result = call(4, sender, command, label, args); argcount = 4; } catch (PyException e) { //this could goof up someone ... they'll probably yell at us and eventually read this code ... fuck them if (e.type == Py.TypeError && e.value.toString().endsWith("takes exactly 3 arguments (4 given)")) { result = call(3, sender, command, label, args); argcount = 3; } else if (e.type == Py.TypeError && e.value.toString().endsWith("takes exactly 2 arguments (4 given)")) { result = call(2, sender, command, label, args); argcount = 2; } else { throw e; } } } else { result = call(argcount, sender, command, label, args); } return result; }
public boolean onCommand(CommandSender sender, Command command, String label, String[] args) { boolean result; if (argcount == -1) { try { result = call(4, sender, command, label, args); argcount = 4; } catch (PyException e) { //this could goof up someone ... they'll probably yell at us and eventually read this code ... fuck them if (e.type == Py.TypeError && (e.value.toString().endsWith("takes exactly 3 arguments (4 given)") || e.value.toString().endsWith("takes exactly 4 arguments (5 given)"))) { result = call(3, sender, command, label, args); argcount = 3; } else if (e.type == Py.TypeError && (e.value.toString().endsWith("takes exactly 2 arguments (4 given)") || e.value.toString().endsWith("takes exactly 3 arguments (5 given)"))) { result = call(2, sender, command, label, args); argcount = 2; } else { throw e; } } } else { result = call(argcount, sender, command, label, args); } return result; }
diff --git a/components/bio-formats/src/loci/formats/in/NikonReader.java b/components/bio-formats/src/loci/formats/in/NikonReader.java index 5c0371978..c8bcabe1f 100644 --- a/components/bio-formats/src/loci/formats/in/NikonReader.java +++ b/components/bio-formats/src/loci/formats/in/NikonReader.java @@ -1,455 +1,457 @@ // // NikonReader.java // /* OME Bio-Formats package for reading and converting biological file formats. Copyright (C) 2005-@year@ UW-Madison LOCI and Glencoe Software, Inc. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package loci.formats.in; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import loci.common.RandomAccessInputStream; import loci.formats.FormatException; import loci.formats.FormatTools; import loci.formats.ImageTools; import loci.formats.MetadataTools; import loci.formats.codec.BitBuffer; import loci.formats.codec.NikonCodec; import loci.formats.codec.NikonCodecOptions; import loci.formats.meta.MetadataStore; import loci.formats.tiff.IFD; import loci.formats.tiff.IFDList; import loci.formats.tiff.PhotoInterp; import loci.formats.tiff.TiffCompression; import loci.formats.tiff.TiffParser; import loci.formats.tiff.TiffRational; /** * NikonReader is the file format reader for Nikon NEF (TIFF) files. 
* * <dl><dt><b>Source code:</b></dt> * <dd><a href="http://trac.openmicroscopy.org.uk/ome/browser/bioformats.git/components/bio-formats/src/loci/formats/in/NikonReader.java">Trac</a>, * <a href="http://git.openmicroscopy.org/?p=bioformats.git;a=blob;f=components/bio-formats/src/loci/formats/in/NikonReader.java;hb=HEAD">Gitweb</a></dd></dl> * * @author Melissa Linkert melissa at glencoesoftware.com */ public class NikonReader extends BaseTiffReader { // -- Constants -- /** Logger for this class. */ private static final Logger LOGGER = LoggerFactory.getLogger(NikonReader.class); public static final String[] NEF_SUFFIX = {"nef"}; // Tags that give a good indication of whether this is an NEF file. private static final int TIFF_EPS_STANDARD = 37398; private static final int COLOR_MAP = 33422; // Maker Note tags. private static final int FIRMWARE_VERSION = 1; private static final int ISO = 2; private static final int QUALITY = 4; private static final int MAKER_WHITE_BALANCE = 5; private static final int SHARPENING = 6; private static final int FOCUS_MODE = 7; private static final int FLASH_SETTING = 8; private static final int FLASH_MODE = 9; private static final int WHITE_BALANCE_FINE = 11; private static final int WHITE_BALANCE_RGB_COEFFS = 12; private static final int FLASH_COMPENSATION = 18; private static final int TONE_COMPENSATION = 129; private static final int LENS_TYPE = 131; private static final int LENS = 132; private static final int FLASH_USED = 135; private static final int CURVE = 140; private static final int COLOR_MODE = 141; private static final int LIGHT_TYPE = 144; private static final int HUE = 146; private static final int CAPTURE_EDITOR_DATA = 3585; // -- Fields -- /** Offset to the Nikon Maker Note. */ protected int makerNoteOffset; /** The original IFD. 
*/ protected IFD original; private TiffRational[] whiteBalance; private Object cfaPattern; private int[] curve; private int[] vPredictor; private boolean lossyCompression; private int split = -1; private byte[] lastPlane = null; private int lastIndex = -1; // -- Constructor -- /** Constructs a new Nikon reader. */ public NikonReader() { super("Nikon NEF", new String[] {"nef", "tif", "tiff"}); suffixSufficient = false; domains = new String[] {FormatTools.GRAPHICS_DOMAIN}; } // -- IFormatReader API methods -- /* @see loci.formats.IFormatReader#isThisType(String, boolean) */ public boolean isThisType(String name, boolean open) { // extension is sufficient as long as it is NEF if (checkSuffix(name, NEF_SUFFIX)) return true; return super.isThisType(name, open); } /* @see loci.formats.IFormatReader#isThisType(RandomAccessInputStream) */ public boolean isThisType(RandomAccessInputStream stream) throws IOException { TiffParser tp = new TiffParser(stream); IFD ifd = tp.getFirstIFD(); if (ifd == null) return false; if (ifd.containsKey(TIFF_EPS_STANDARD)) return true; String make = ifd.getIFDTextValue(IFD.MAKE); return make != null && make.indexOf("Nikon") != -1; } /** * @see loci.formats.IFormatReader#openBytes(int, byte[], int, int, int, int) */ public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h) throws FormatException, IOException { FormatTools.checkPlaneParameters(this, no, buf.length, x, y, w, h); IFD ifd = ifds.get(no); int[] bps = ifd.getBitsPerSample(); int dataSize = bps[0]; long[] byteCounts = ifd.getStripByteCounts(); long totalBytes = 0; for (long b : byteCounts) { totalBytes += b; } if (totalBytes == FormatTools.getPlaneSize(this) || bps.length > 1) { return super.openBytes(no, buf, x, y, w, h); } if (lastPlane == null || lastIndex != no) { long[] offsets = ifd.getStripOffsets(); boolean maybeCompressed = ifd.getCompression() == TiffCompression.NIKON; boolean compressed = vPredictor != null && curve != null && maybeCompressed; if 
(!maybeCompressed && dataSize == 14) dataSize = 16; ByteArrayOutputStream src = new ByteArrayOutputStream(); NikonCodec codec = new NikonCodec(); NikonCodecOptions options = new NikonCodecOptions(); options.width = getSizeX(); options.height = getSizeY(); options.bitsPerSample = dataSize; options.curve = curve; if (vPredictor != null) { options.vPredictor = new int[vPredictor.length]; } options.lossless = !lossyCompression; options.split = split; for (int i=0; i<byteCounts.length; i++) { byte[] t = new byte[(int) byteCounts[i]]; in.seek(offsets[i]); in.read(t); if (compressed) { options.maxBytes = (int) byteCounts[i]; System.arraycopy(vPredictor, 0, options.vPredictor, 0, vPredictor.length); t = codec.decompress(t, options); } src.write(t); } BitBuffer bb = new BitBuffer(src.toByteArray()); short[] pix = new short[getSizeX() * getSizeY() * 3]; src.close(); int[] colorMap = {1, 0, 2, 1}; // default color map short[] ifdColors = (short[]) ifd.get(COLOR_MAP); + boolean colorsValid = false; if (ifdColors != null && ifdColors.length >= colorMap.length) { - boolean colorsValid = true; + colorsValid = true; for (int q=0; q<colorMap.length; q++) { if (ifdColors[q] < 0 || ifdColors[q] > 2) { // found invalid channel index, use default color map instead colorsValid = false; break; } } if (colorsValid) { for (int q=0; q<colorMap.length; q++) { colorMap[q] = ifdColors[q]; } } } - boolean interleaveRows = offsets.length == 1 && !maybeCompressed; + boolean interleaveRows = + offsets.length == 1 && !maybeCompressed && !colorsValid; for (int row=0; row<getSizeY(); row++) { int realRow = interleaveRows ? (row < (getSizeY() / 2) ? 
row * 2 : (row - (getSizeY() / 2)) * 2 + 1) : row; for (int col=0; col<getSizeX(); col++) { short val = (short) (bb.getBits(dataSize) & 0xffff); int mapIndex = (realRow % 2) * 2 + (col % 2); int redOffset = realRow * getSizeX() + col; int greenOffset = (getSizeY() + realRow) * getSizeX() + col; int blueOffset = (2 * getSizeY() + realRow) * getSizeX() + col; if (colorMap[mapIndex] == 0) { pix[redOffset] = adjustForWhiteBalance(val, 0); } else if (colorMap[mapIndex] == 1) { pix[greenOffset] = adjustForWhiteBalance(val, 1); } else if (colorMap[mapIndex] == 2) { pix[blueOffset] = adjustForWhiteBalance(val, 2); } if (maybeCompressed && !compressed) { int toSkip = 0; if ((col % 10) == 9) { toSkip = 1; } if (col == getSizeX() - 1) { toSkip = 10; } bb.skipBits(toSkip * 8); } } } lastPlane = new byte[FormatTools.getPlaneSize(this)]; ImageTools.interpolate(pix, lastPlane, colorMap, getSizeX(), getSizeY(), isLittleEndian()); lastIndex = no; } int bpp = FormatTools.getBytesPerPixel(getPixelType()) * 3; int rowLen = w * bpp; int width = getSizeX() * bpp; for (int row=0; row<h; row++) { System.arraycopy( lastPlane, (row + y) * width + x * bpp, buf, row * rowLen, rowLen); } return buf; } /* @see loci.formats.IFormatReader#close(boolean) */ public void close(boolean fileOnly) throws IOException { super.close(fileOnly); if (!fileOnly) { makerNoteOffset = 0; original = null; split = -1; whiteBalance = null; cfaPattern = null; curve = null; vPredictor = null; lossyCompression = false; lastPlane = null; lastIndex = -1; } } // -- Internal BaseTiffReader API methods -- /* @see BaseTiffReader#initStandardMetadata() */ protected void initStandardMetadata() throws FormatException, IOException { super.initStandardMetadata(); // reset image dimensions // the actual image data is stored in IFDs referenced by the SubIFD tag // in the 'real' IFD core[0].imageCount = ifds.size(); IFD firstIFD = ifds.get(0); PhotoInterp photo = firstIFD.getPhotometricInterpretation(); int samples = 
firstIFD.getSamplesPerPixel(); core[0].rgb = samples > 1 || photo == PhotoInterp.RGB || photo == PhotoInterp.CFA_ARRAY; if (photo == PhotoInterp.CFA_ARRAY) samples = 3; core[0].sizeX = (int) firstIFD.getImageWidth(); core[0].sizeY = (int) firstIFD.getImageLength(); core[0].sizeZ = 1; core[0].sizeC = isRGB() ? samples : 1; core[0].sizeT = ifds.size(); core[0].pixelType = firstIFD.getPixelType(); core[0].indexed = false; // now look for the EXIF IFD pointer IFDList exifIFDs = tiffParser.getExifIFDs(); if (exifIFDs.size() > 0) { IFD exifIFD = exifIFDs.get(0); tiffParser.fillInIFD(exifIFD); // put all the EXIF data in the metadata hashtable for (Integer key : exifIFD.keySet()) { int tag = key.intValue(); String name = IFD.getIFDTagName(tag); if (tag == IFD.CFA_PATTERN) { byte[] cfa = (byte[]) exifIFD.get(key); int[] colorMap = new int[cfa.length]; for (int i=0; i<cfa.length; i++) colorMap[i] = (int) cfa[i]; addGlobalMeta(name, colorMap); cfaPattern = colorMap; } else { addGlobalMeta(name, exifIFD.get(key)); if (name.equals("MAKER_NOTE")) { byte[] b = (byte[]) exifIFD.get(key); int extra = new String(b, 0, 10).startsWith("Nikon") ? 
10 : 0; byte[] buf = new byte[b.length]; System.arraycopy(b, extra, buf, 0, buf.length - extra); RandomAccessInputStream makerNote = new RandomAccessInputStream(buf); TiffParser tp = new TiffParser(makerNote); IFD note = null; try { note = tp.getFirstIFD(); } catch (Exception e) { LOGGER.debug("Failed to parse first IFD", e); } if (note != null) { for (Integer nextKey : note.keySet()) { int nextTag = nextKey.intValue(); addGlobalMeta(name, note.get(nextKey)); if (nextTag == 150) { b = (byte[]) note.get(nextKey); RandomAccessInputStream s = new RandomAccessInputStream(b); byte check1 = s.readByte(); byte check2 = s.readByte(); lossyCompression = check1 != 0x46; vPredictor = new int[4]; for (int q=0; q<vPredictor.length; q++) { vPredictor[q] = s.readShort(); } curve = new int[16385]; int bps = ifds.get(0).getBitsPerSample()[0]; int max = 1 << bps & 0x7fff; int step = 0; int csize = s.readShort(); if (csize > 1) { step = max / (csize - 1); } if (check1 == 0x44 && check2 == 0x20 && step > 0) { for (int i=0; i<csize; i++) { curve[i * step] = s.readShort(); } for (int i=0; i<max; i++) { int n = i % step; curve[i] = (curve[i - n] * (step - n) + curve[i - n + step] * n) / step; } s.seek(562); split = s.readShort(); } else { int maxValue = (int) Math.pow(2, bps) - 1; Arrays.fill(curve, maxValue); int nElements = (int) (s.length() - s.getFilePointer()) / 2; if (nElements < 100) { for (int i=0; i<curve.length; i++) { curve[i] = (short) i; } } else { for (int q=0; q<nElements; q++) { curve[q] = s.readShort(); } } } s.close(); } else if (nextTag == WHITE_BALANCE_RGB_COEFFS) { whiteBalance = (TiffRational[]) note.get(nextKey); } } } makerNote.close(); } } } } } // -- Internal FormatReader API methods -- /* @see loci.formats.FormatReader#initFile(String) */ protected void initFile(String id) throws FormatException, IOException { super.initFile(id); original = ifds.get(0); if (cfaPattern != null) { original.putIFDValue(IFD.COLOR_MAP, (int[]) cfaPattern); } ifds.set(0, original); 
core[0].imageCount = 1; core[0].sizeT = 1; if (ifds.get(0).getSamplesPerPixel() == 1) { core[0].interleaved = true; } MetadataStore store = makeFilterMetadata(); MetadataTools.populatePixels(store, this); } // -- Helper methods -- private short adjustForWhiteBalance(short val, int index) { if (whiteBalance != null && whiteBalance.length == 3) { return (short) (val * whiteBalance[index].doubleValue()); } return val; } }
false
true
public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h) throws FormatException, IOException { FormatTools.checkPlaneParameters(this, no, buf.length, x, y, w, h); IFD ifd = ifds.get(no); int[] bps = ifd.getBitsPerSample(); int dataSize = bps[0]; long[] byteCounts = ifd.getStripByteCounts(); long totalBytes = 0; for (long b : byteCounts) { totalBytes += b; } if (totalBytes == FormatTools.getPlaneSize(this) || bps.length > 1) { return super.openBytes(no, buf, x, y, w, h); } if (lastPlane == null || lastIndex != no) { long[] offsets = ifd.getStripOffsets(); boolean maybeCompressed = ifd.getCompression() == TiffCompression.NIKON; boolean compressed = vPredictor != null && curve != null && maybeCompressed; if (!maybeCompressed && dataSize == 14) dataSize = 16; ByteArrayOutputStream src = new ByteArrayOutputStream(); NikonCodec codec = new NikonCodec(); NikonCodecOptions options = new NikonCodecOptions(); options.width = getSizeX(); options.height = getSizeY(); options.bitsPerSample = dataSize; options.curve = curve; if (vPredictor != null) { options.vPredictor = new int[vPredictor.length]; } options.lossless = !lossyCompression; options.split = split; for (int i=0; i<byteCounts.length; i++) { byte[] t = new byte[(int) byteCounts[i]]; in.seek(offsets[i]); in.read(t); if (compressed) { options.maxBytes = (int) byteCounts[i]; System.arraycopy(vPredictor, 0, options.vPredictor, 0, vPredictor.length); t = codec.decompress(t, options); } src.write(t); } BitBuffer bb = new BitBuffer(src.toByteArray()); short[] pix = new short[getSizeX() * getSizeY() * 3]; src.close(); int[] colorMap = {1, 0, 2, 1}; // default color map short[] ifdColors = (short[]) ifd.get(COLOR_MAP); if (ifdColors != null && ifdColors.length >= colorMap.length) { boolean colorsValid = true; for (int q=0; q<colorMap.length; q++) { if (ifdColors[q] < 0 || ifdColors[q] > 2) { // found invalid channel index, use default color map instead colorsValid = false; break; } } if (colorsValid) { for (int 
q=0; q<colorMap.length; q++) { colorMap[q] = ifdColors[q]; } } } boolean interleaveRows = offsets.length == 1 && !maybeCompressed; for (int row=0; row<getSizeY(); row++) { int realRow = interleaveRows ? (row < (getSizeY() / 2) ? row * 2 : (row - (getSizeY() / 2)) * 2 + 1) : row; for (int col=0; col<getSizeX(); col++) { short val = (short) (bb.getBits(dataSize) & 0xffff); int mapIndex = (realRow % 2) * 2 + (col % 2); int redOffset = realRow * getSizeX() + col; int greenOffset = (getSizeY() + realRow) * getSizeX() + col; int blueOffset = (2 * getSizeY() + realRow) * getSizeX() + col; if (colorMap[mapIndex] == 0) { pix[redOffset] = adjustForWhiteBalance(val, 0); } else if (colorMap[mapIndex] == 1) { pix[greenOffset] = adjustForWhiteBalance(val, 1); } else if (colorMap[mapIndex] == 2) { pix[blueOffset] = adjustForWhiteBalance(val, 2); } if (maybeCompressed && !compressed) { int toSkip = 0; if ((col % 10) == 9) { toSkip = 1; } if (col == getSizeX() - 1) { toSkip = 10; } bb.skipBits(toSkip * 8); } } } lastPlane = new byte[FormatTools.getPlaneSize(this)]; ImageTools.interpolate(pix, lastPlane, colorMap, getSizeX(), getSizeY(), isLittleEndian()); lastIndex = no; } int bpp = FormatTools.getBytesPerPixel(getPixelType()) * 3; int rowLen = w * bpp; int width = getSizeX() * bpp; for (int row=0; row<h; row++) { System.arraycopy( lastPlane, (row + y) * width + x * bpp, buf, row * rowLen, rowLen); } return buf; }
public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h) throws FormatException, IOException { FormatTools.checkPlaneParameters(this, no, buf.length, x, y, w, h); IFD ifd = ifds.get(no); int[] bps = ifd.getBitsPerSample(); int dataSize = bps[0]; long[] byteCounts = ifd.getStripByteCounts(); long totalBytes = 0; for (long b : byteCounts) { totalBytes += b; } if (totalBytes == FormatTools.getPlaneSize(this) || bps.length > 1) { return super.openBytes(no, buf, x, y, w, h); } if (lastPlane == null || lastIndex != no) { long[] offsets = ifd.getStripOffsets(); boolean maybeCompressed = ifd.getCompression() == TiffCompression.NIKON; boolean compressed = vPredictor != null && curve != null && maybeCompressed; if (!maybeCompressed && dataSize == 14) dataSize = 16; ByteArrayOutputStream src = new ByteArrayOutputStream(); NikonCodec codec = new NikonCodec(); NikonCodecOptions options = new NikonCodecOptions(); options.width = getSizeX(); options.height = getSizeY(); options.bitsPerSample = dataSize; options.curve = curve; if (vPredictor != null) { options.vPredictor = new int[vPredictor.length]; } options.lossless = !lossyCompression; options.split = split; for (int i=0; i<byteCounts.length; i++) { byte[] t = new byte[(int) byteCounts[i]]; in.seek(offsets[i]); in.read(t); if (compressed) { options.maxBytes = (int) byteCounts[i]; System.arraycopy(vPredictor, 0, options.vPredictor, 0, vPredictor.length); t = codec.decompress(t, options); } src.write(t); } BitBuffer bb = new BitBuffer(src.toByteArray()); short[] pix = new short[getSizeX() * getSizeY() * 3]; src.close(); int[] colorMap = {1, 0, 2, 1}; // default color map short[] ifdColors = (short[]) ifd.get(COLOR_MAP); boolean colorsValid = false; if (ifdColors != null && ifdColors.length >= colorMap.length) { colorsValid = true; for (int q=0; q<colorMap.length; q++) { if (ifdColors[q] < 0 || ifdColors[q] > 2) { // found invalid channel index, use default color map instead colorsValid = false; break; } } if 
(colorsValid) { for (int q=0; q<colorMap.length; q++) { colorMap[q] = ifdColors[q]; } } } boolean interleaveRows = offsets.length == 1 && !maybeCompressed && !colorsValid; for (int row=0; row<getSizeY(); row++) { int realRow = interleaveRows ? (row < (getSizeY() / 2) ? row * 2 : (row - (getSizeY() / 2)) * 2 + 1) : row; for (int col=0; col<getSizeX(); col++) { short val = (short) (bb.getBits(dataSize) & 0xffff); int mapIndex = (realRow % 2) * 2 + (col % 2); int redOffset = realRow * getSizeX() + col; int greenOffset = (getSizeY() + realRow) * getSizeX() + col; int blueOffset = (2 * getSizeY() + realRow) * getSizeX() + col; if (colorMap[mapIndex] == 0) { pix[redOffset] = adjustForWhiteBalance(val, 0); } else if (colorMap[mapIndex] == 1) { pix[greenOffset] = adjustForWhiteBalance(val, 1); } else if (colorMap[mapIndex] == 2) { pix[blueOffset] = adjustForWhiteBalance(val, 2); } if (maybeCompressed && !compressed) { int toSkip = 0; if ((col % 10) == 9) { toSkip = 1; } if (col == getSizeX() - 1) { toSkip = 10; } bb.skipBits(toSkip * 8); } } } lastPlane = new byte[FormatTools.getPlaneSize(this)]; ImageTools.interpolate(pix, lastPlane, colorMap, getSizeX(), getSizeY(), isLittleEndian()); lastIndex = no; } int bpp = FormatTools.getBytesPerPixel(getPixelType()) * 3; int rowLen = w * bpp; int width = getSizeX() * bpp; for (int row=0; row<h; row++) { System.arraycopy( lastPlane, (row + y) * width + x * bpp, buf, row * rowLen, rowLen); } return buf; }
diff --git a/src/main/java/net/vhati/modmanager/core/XMLPatcher.java b/src/main/java/net/vhati/modmanager/core/XMLPatcher.java index d330c6d..6660478 100644 --- a/src/main/java/net/vhati/modmanager/core/XMLPatcher.java +++ b/src/main/java/net/vhati/modmanager/core/XMLPatcher.java @@ -1,580 +1,592 @@ package net.vhati.modmanager.core; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.InputStream; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import java.util.Set; import java.util.TreeMap; import java.util.regex.Matcher; import java.util.regex.Pattern; import net.vhati.modmanager.core.SloppyXMLParser; import org.jdom2.Attribute; import org.jdom2.Content; import org.jdom2.Document; import org.jdom2.Element; import org.jdom2.Namespace; import org.jdom2.filter.AbstractFilter; import org.jdom2.filter.ElementFilter; import org.jdom2.filter.Filter; import org.jdom2.input.JDOMParseException; import org.jdom2.input.SAXBuilder; /** * Programmatically edits existing XML with instructions from another XML doc. * Other tags are simply appended as-is. 
*/ public class XMLPatcher { protected boolean globalPanic = false; protected Namespace modNS; protected Namespace modAppendNS; protected Namespace modOverwriteNS; public XMLPatcher() { modNS = Namespace.getNamespace( "mod", "mod" ); modAppendNS = Namespace.getNamespace( "mod-append", "mod-append" ); modOverwriteNS = Namespace.getNamespace( "mod-overwrite", "mod-overwrite" ); } public void setGlobalPanic( boolean b ) { globalPanic = b; } public Document patch( Document mainDoc, Document appendDoc ) { Document resultDoc = mainDoc.clone(); Element resultRoot = resultDoc.getRootElement(); Element appendRoot = appendDoc.getRootElement(); ElementFilter modFilter = new ElementFilter( modNS ); for ( Content content : appendRoot.getContent() ) { if ( modFilter.matches( content ) ) { Element node = (Element)content; boolean handled = false; List<Element> matchedNodes = handleModFind( resultRoot, node ); if ( matchedNodes != null ) { handled = true; for ( Element matchedNode : matchedNodes ) { handleModCommands( matchedNode, node ); } } if ( !handled ) { throw new IllegalArgumentException( String.format( "Unrecognized mod tag <%s> (%s).", node.getName(), getPathToRoot(node) ) ); } } else { resultRoot.addContent( content.clone() ); } } return resultDoc; } /** * Returns find results if node is a find tag, or null if it's not. * * An empty list will be returned if there were no matches. * * TODO: Throw an exception in callers if results are required. 
*/ protected List<Element> handleModFind( Element contextNode, Element node ) { List<Element> result = null; if ( node.getNamespace().equals( modNS ) ) { if ( node.getName().equals( "findName" ) ) { String searchName = node.getAttributeValue( "name" ); String searchType = node.getAttributeValue( "type" ); boolean searchReverse = getAttributeBooleanValue( node, "reverse", true ); int searchStart = getAttributeIntValue( node, "start", 0 ); int searchLimit = getAttributeIntValue( node, "limit", 1 ); boolean panic = getAttributeBooleanValue( node, "panic", false ); if ( globalPanic ) panic = true; if ( searchName == null || searchName.length() == 0 ) throw new IllegalArgumentException( String.format( "<%s> requires a name attribute (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchType != null && searchType.length() == 0 ) throw new IllegalArgumentException( String.format( "<%s> type attribute, when present, can't be empty (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchStart < 0 ) throw new IllegalArgumentException( String.format( "<%s> 'start' attribute is not >= 0 (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchLimit < -1 ) throw new IllegalArgumentException( String.format( "<%s> 'limit' attribute is not >= -1 (%s).", node.getName(), getPathToRoot(node) ) ); Map<String,String> attrMap = new HashMap<String,String>(); attrMap.put( "name", searchName ); LikeFilter searchFilter = new LikeFilter( searchType, attrMap, null ); List<Element> matchedNodes = new ArrayList<Element>( contextNode.getContent( searchFilter ) ); if ( searchReverse ) Collections.reverse( matchedNodes ); if ( searchStart < matchedNodes.size() ) { if ( searchLimit > -1 ) { matchedNodes = matchedNodes.subList( searchStart, Math.max( matchedNodes.size(), searchStart + searchLimit ) ); } else if ( searchStart > 0 ) { matchedNodes = matchedNodes.subList( searchStart, matchedNodes.size() ); } } + else { + matchedNodes.clear(); + } if ( panic && matchedNodes.isEmpty() ) 
throw new NoSuchElementException( String.format( "<%s> was set to require results but found none (%s).", node.getName(), getPathToRoot(node) ) ); result = matchedNodes; } else if ( node.getName().equals( "findLike" ) ) { String searchType = node.getAttributeValue( "type" ); boolean searchReverse = getAttributeBooleanValue( node, "reverse", false ); int searchStart = getAttributeIntValue( node, "start", 0 ); int searchLimit = getAttributeIntValue( node, "limit", -1 ); boolean panic = getAttributeBooleanValue( node, "panic", false ); if ( globalPanic ) panic = true; if ( searchType != null && searchType.length() == 0 ) throw new IllegalArgumentException( String.format( "<%s> type attribute, when present, can't be empty (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchStart < 0 ) throw new IllegalArgumentException( String.format( "<%s> 'start' attribute is not >= 0 (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchLimit < -1 ) throw new IllegalArgumentException( String.format( "<%s> 'limit' attribute is not >= -1 (%s).", node.getName(), getPathToRoot(node) ) ); Map<String,String> attrMap = new HashMap<String,String>(); String searchValue = null; Element selectorNode = node.getChild( "selector", modNS ); if ( selectorNode != null ) { for ( Attribute attr : selectorNode.getAttributes() ) { if ( attr.getNamespace().equals( Namespace.NO_NAMESPACE ) ) { // Blank element values can't be detected as different from absent values (never null). // Forbid "" attributes for consistency. :/ if ( attr.getValue().length() == 0 ) throw new IllegalArgumentException( String.format( "<%s> attributes, when present, can't be empty (%s).", selectorNode.getName(), getPathToRoot(selectorNode) ) ); attrMap.put( attr.getName(), attr.getValue() ); } } searchValue = selectorNode.getTextTrim(); // Never null, but often "". 
if ( searchValue.length() == 0 ) searchValue = null; } LikeFilter searchFilter = new LikeFilter( searchType, attrMap, searchValue ); List<Element> matchedNodes = new ArrayList<Element>( contextNode.getContent( searchFilter ) ); if ( searchReverse ) Collections.reverse( matchedNodes ); if ( searchStart < matchedNodes.size() ) { if ( searchLimit > -1 ) { matchedNodes = matchedNodes.subList( searchStart, Math.max( matchedNodes.size(), searchStart + searchLimit ) ); } else if ( searchStart > 0 ) { matchedNodes = matchedNodes.subList( searchStart, matchedNodes.size() ); } } + else { + matchedNodes.clear(); + } if ( panic && matchedNodes.isEmpty() ) throw new NoSuchElementException( String.format( "<%s> was set to require results but found none (%s).", node.getName(), getPathToRoot(node) ) ); result = matchedNodes; } else if ( node.getName().equals( "findWithChildLike" ) ) { String searchType = node.getAttributeValue( "type" ); String searchChildType = node.getAttributeValue( "child-type" ); boolean searchReverse = getAttributeBooleanValue( node, "reverse", false ); int searchStart = getAttributeIntValue( node, "start", 0 ); int searchLimit = getAttributeIntValue( node, "limit", -1 ); boolean panic = getAttributeBooleanValue( node, "panic", false ); if ( globalPanic ) panic = true; if ( searchType != null && searchType.length() == 0 ) throw new IllegalArgumentException( String.format( "<%s> type attribute, when present, can't be empty (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchChildType != null && searchChildType.length() == 0 ) throw new IllegalArgumentException( String.format( "<%s> child-type attribute, when present, can't be empty (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchStart < 0 ) throw new IllegalArgumentException( String.format( "<%s> 'start' attribute is not >= 0 (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchLimit < -1 ) throw new IllegalArgumentException( String.format( "<%s> 'limit' attribute is not >= -1 
(%s).", node.getName(), getPathToRoot(node) ) ); Map<String,String> attrMap = new HashMap<String,String>(); String searchValue = null; Element selectorNode = node.getChild( "selector", modNS ); if ( selectorNode != null ) { for ( Attribute attr : selectorNode.getAttributes() ) { if ( attr.getNamespace().equals( Namespace.NO_NAMESPACE ) ) { // TODO: Forbid "" attributes, because blank value doesn't work? attrMap.put( attr.getName(), attr.getValue() ); } } searchValue = selectorNode.getTextTrim(); // Never null, but often "". if ( searchValue.length() == 0 ) searchValue = null; } LikeFilter searchChildFilter = new LikeFilter( searchChildType, attrMap, searchValue ); WithChildFilter searchFilter = new WithChildFilter( searchType, searchChildFilter ); List<Element> matchedNodes = new ArrayList<Element>( contextNode.getContent( searchFilter ) ); if ( searchReverse ) Collections.reverse( matchedNodes ); if ( searchStart < matchedNodes.size() ) { if ( searchLimit > -1 ) { matchedNodes = matchedNodes.subList( searchStart, Math.max( matchedNodes.size(), searchStart + searchLimit ) ); } else if ( searchStart > 0 ) { matchedNodes = matchedNodes.subList( searchStart, matchedNodes.size() ); } } + else { + matchedNodes.clear(); + } if ( panic && matchedNodes.isEmpty() ) throw new NoSuchElementException( String.format( "<%s> was set to require results but found none (%s).", node.getName(), getPathToRoot(node) ) ); result = matchedNodes; } else if ( node.getName().equals( "findComposite" ) ) { boolean searchReverse = getAttributeBooleanValue( node, "reverse", false ); int searchStart = getAttributeIntValue( node, "start", 0 ); int searchLimit = getAttributeIntValue( node, "limit", -1 ); boolean panic = getAttributeBooleanValue( node, "panic", false ); if ( globalPanic ) panic = true; if ( searchStart < 0 ) throw new IllegalArgumentException( String.format( "<%s> 'start' attribute is not >= 0 (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchLimit < -1 ) throw new 
IllegalArgumentException( String.format( "<%s> 'limit' attribute is not >= -1 (%s).", node.getName(), getPathToRoot(node) ) ); Element parNode = node.getChild( "par", modNS ); if ( parNode == null ) throw new IllegalArgumentException( String.format( "<%s> requires a <par> tag (%s).", node.getName(), getPathToRoot(node) ) ); List<Element> matchedNodes = handleModPar( contextNode, parNode ); if ( searchReverse ) Collections.reverse( matchedNodes ); if ( searchStart < matchedNodes.size() ) { if ( searchLimit > -1 ) { matchedNodes = matchedNodes.subList( searchStart, Math.max( matchedNodes.size(), searchStart + searchLimit ) ); } else if ( searchStart > 0 ) { matchedNodes = matchedNodes.subList( searchStart, matchedNodes.size() ); } } + else { + matchedNodes.clear(); + } if ( panic && matchedNodes.isEmpty() ) throw new NoSuchElementException( String.format( "<%s> was set to require results but found none (%s).", node.getName(), getPathToRoot(node) ) ); result = matchedNodes; } } return result; } /** * Returns collated find results (and par results, handled recursively), or null if node wasn't a par. * * Unique results from all finds will be combined and sorted in the order they appear under contextNode. */ protected List<Element> handleModPar( Element contextNode, Element node ) { List<Element> result = null; if ( node.getNamespace().equals( modNS ) ) { if ( node.getName().equals( "par" ) ) { String parOp = node.getAttributeValue( "op" ); if ( parOp == null || (!parOp.equals("AND") && !parOp.equals("OR")) ) throw new IllegalArgumentException( String.format( "Invalid \"op\" attribute (%s). 
Must be 'AND' or 'OR'.", getPathToRoot(node) ) ); boolean isAnd = parOp.equals("AND"); boolean isOr = parOp.equals("OR"); Set<Element> candidateSet = new HashSet<Element>(); for ( Element criteriaNode : node.getChildren() ) { List<Element> candidates; if ( criteriaNode.getName().equals( "par" ) && criteriaNode.getNamespace().equals( modNS ) ) { candidates = handleModPar( contextNode, criteriaNode ); } else { candidates = handleModFind( contextNode, criteriaNode ); if ( candidates == null ) throw new IllegalArgumentException( String.format( "Invalid <par> search criteria <%s> (%s). Must be a <find...> or <par>.", criteriaNode.getName(), getPathToRoot(criteriaNode) ) ); } if ( isOr || candidateSet.isEmpty() ) { candidateSet.addAll( candidates ); } else if ( isAnd ) { candidateSet.retainAll( candidates ); } } Map<Integer,Element> orderedCandidateMap = new TreeMap<Integer,Element>(); for ( Element candidate : candidateSet ) { int index = contextNode.indexOf( candidate ); orderedCandidateMap.put( new Integer(index), candidate ); } List<Element> matchedNodes = new ArrayList<Element>( orderedCandidateMap.values() ); result = matchedNodes; } } return result; } /** * Performs child mod-commands under node, against contextNode. * * TODO: Maybe have handleModCommand() returning null when unrecognized, * or an object with flags to continue or stop looping commands at * contextNode (e.g., halting after removeTag). */ protected void handleModCommands( Element contextNode, Element node ) { for ( Element cmdNode : node.getChildren() ) { boolean handled = false; if ( cmdNode.getNamespace().equals( modNS ) ) { // Handle nested finds. List<Element> matchedNodes = handleModFind( contextNode, cmdNode ); if ( matchedNodes != null ) { handled = true; for ( Element matchedNode : matchedNodes ) { handleModCommands( matchedNode, cmdNode ); } } else if ( cmdNode.getName().equals( "selector" ) ) { handled = true; // No-op. 
} else if ( cmdNode.getName().equals( "par" ) ) { handled = true; // No-op. } else if ( cmdNode.getName().equals( "setAttributes" ) ) { handled = true; for ( Attribute attrib : cmdNode.getAttributes() ) { contextNode.setAttribute( attrib.clone() ); } } else if ( cmdNode.getName().equals( "setValue" ) ) { handled = true; contextNode.setText( cmdNode.getTextTrim() ); } else if ( cmdNode.getName().equals( "removeTag" ) ) { handled = true; contextNode.detach(); break; } } else if ( cmdNode.getNamespace().equals( modAppendNS ) ) { // Append cmdNode (sans namespace) to the contextNode. handled = true; Element newNode = cmdNode.clone(); newNode.setNamespace( null ); contextNode.addContent( newNode ); } else if ( cmdNode.getNamespace().equals( modOverwriteNS ) ) { // Remove the first child with the same type and insert cmdNode at its position. // Or just append if nothing was replaced. handled = true; Element newNode = cmdNode.clone(); newNode.setNamespace( null ); Element doomedNode = contextNode.getChild( cmdNode.getName(), null ); if ( doomedNode != null ) { int doomedIndex = contextNode.indexOf( doomedNode ); doomedNode.detach(); contextNode.addContent( doomedIndex, newNode ); } else { contextNode.addContent( newNode ); } } if ( !handled ) { throw new IllegalArgumentException( String.format( "Unrecognized mod tag <%s> (%s).", cmdNode.getName(), getPathToRoot(cmdNode) ) ); } } } /** * Returns a string describing this element's location. * * Example: /root/event(SOME_NAME)/choice/text */ protected String getPathToRoot( Element node ) { StringBuilder buf = new StringBuilder(); String chunk; String tmp; while ( node != null ) { chunk = "/"+ node.getName(); tmp = node.getAttributeValue( "name" ); if ( tmp != null && tmp.length() > 0 ) chunk += "("+ tmp +")"; buf.insert( 0, chunk ); node = node.getParentElement(); } return buf.toString(); } /** * Returns the boolean value of an attribute, or a default when the attribute is null. * Only 'true' and 'false' are accepted. 
*/ protected boolean getAttributeBooleanValue( Element node, String attrName, boolean defaultValue ) { String tmp = node.getAttributeValue( attrName ); if ( tmp == null ) return defaultValue; if ( tmp.equals( "true" ) ) { return true; } else if ( tmp.equals( "false" ) ) { return false; } else { throw new IllegalArgumentException( String.format( "Invalid boolean attribute \"%s\" (%s). Must be 'true' or 'false'.", attrName, getPathToRoot(node) ) ); } } /** * Returns the int value of an attribute, or a default when the attribute is null. */ protected int getAttributeIntValue( Element node, String attrName, int defaultValue ) { String tmp = node.getAttributeValue( attrName ); if ( tmp == null ) return defaultValue; try { return Integer.parseInt( tmp ); } catch ( NumberFormatException e ) { throw new IllegalArgumentException( String.format( "Invalid int attribute \"%s\" (%s).", attrName, getPathToRoot(node) ) ); } } /** * Matches elements with equal type/attributes/value. * Null args are ignored. A blank type or value arg is ignored. * All given attributes must be present on a candidate to match. * Attribute values in the map must not be null. 
*/ protected static class LikeFilter extends AbstractFilter<Element> { private String type = null;; private Map<String,String> attrMap = null; private String value = null; public LikeFilter( String type, Element selectorNode ) { this.type = type; if ( selectorNode.hasAttributes() ) { this.attrMap = new HashMap<String,String>(); for ( Attribute attr : selectorNode.getAttributes() ) { attrMap.put( attr.getName(), attr.getValue() ); } } this.value = selectorNode.getTextTrim(); if ( this.value.length() == 0 ) this.value = null; } public LikeFilter( String type, Map<String,String> attrMap, String value ) { super(); if ( type != null && type.length() == 0 ) type = null; if ( value != null && value.length() == 0 ) value = null; this.type = type; this.attrMap = attrMap; this.value = value; } @Override public Element filter( Object content ) { if ( content instanceof Element == false ) return null; Element node = (Element)content; String tmp; if ( type != null ) { if ( type.equals( node.getName() ) == false ) { return null; } } if ( attrMap != null ) { for ( Map.Entry<String,String> entry : attrMap.entrySet() ) { String attrName = entry.getKey(); String attrValue = entry.getValue(); tmp = node.getAttributeValue( attrName ); if ( attrValue.equals( tmp ) == false ) { return null; } } } if ( value != null ) { if ( value.equals( node.getTextTrim() ) == false ) { return null; } } return node; } } /** * Matches elements with child elements that match a filter. * If the filter is null, matches all elements with children. 
*/ protected static class WithChildFilter extends AbstractFilter<Element> { private String type; private Filter<Element> childFilter; public WithChildFilter( Filter<Element> childFilter ) { this( null, childFilter ); } public WithChildFilter( String type, Filter<Element> childFilter ) { this.type = type; this.childFilter = childFilter; } @Override public Element filter( Object content ) { if ( content instanceof Element == false ) return null; Element node = (Element)content; if ( type != null ) { if ( type.equals( node.getName() ) == false ) { return null; } } if ( childFilter != null ) { if ( node.getContent( childFilter ).isEmpty() ) return null; } else if ( node.getChildren().isEmpty() ) { return null; } return node; } } }
false
true
protected List<Element> handleModFind( Element contextNode, Element node ) { List<Element> result = null; if ( node.getNamespace().equals( modNS ) ) { if ( node.getName().equals( "findName" ) ) { String searchName = node.getAttributeValue( "name" ); String searchType = node.getAttributeValue( "type" ); boolean searchReverse = getAttributeBooleanValue( node, "reverse", true ); int searchStart = getAttributeIntValue( node, "start", 0 ); int searchLimit = getAttributeIntValue( node, "limit", 1 ); boolean panic = getAttributeBooleanValue( node, "panic", false ); if ( globalPanic ) panic = true; if ( searchName == null || searchName.length() == 0 ) throw new IllegalArgumentException( String.format( "<%s> requires a name attribute (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchType != null && searchType.length() == 0 ) throw new IllegalArgumentException( String.format( "<%s> type attribute, when present, can't be empty (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchStart < 0 ) throw new IllegalArgumentException( String.format( "<%s> 'start' attribute is not >= 0 (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchLimit < -1 ) throw new IllegalArgumentException( String.format( "<%s> 'limit' attribute is not >= -1 (%s).", node.getName(), getPathToRoot(node) ) ); Map<String,String> attrMap = new HashMap<String,String>(); attrMap.put( "name", searchName ); LikeFilter searchFilter = new LikeFilter( searchType, attrMap, null ); List<Element> matchedNodes = new ArrayList<Element>( contextNode.getContent( searchFilter ) ); if ( searchReverse ) Collections.reverse( matchedNodes ); if ( searchStart < matchedNodes.size() ) { if ( searchLimit > -1 ) { matchedNodes = matchedNodes.subList( searchStart, Math.max( matchedNodes.size(), searchStart + searchLimit ) ); } else if ( searchStart > 0 ) { matchedNodes = matchedNodes.subList( searchStart, matchedNodes.size() ); } } if ( panic && matchedNodes.isEmpty() ) throw new NoSuchElementException( 
String.format( "<%s> was set to require results but found none (%s).", node.getName(), getPathToRoot(node) ) ); result = matchedNodes; } else if ( node.getName().equals( "findLike" ) ) { String searchType = node.getAttributeValue( "type" ); boolean searchReverse = getAttributeBooleanValue( node, "reverse", false ); int searchStart = getAttributeIntValue( node, "start", 0 ); int searchLimit = getAttributeIntValue( node, "limit", -1 ); boolean panic = getAttributeBooleanValue( node, "panic", false ); if ( globalPanic ) panic = true; if ( searchType != null && searchType.length() == 0 ) throw new IllegalArgumentException( String.format( "<%s> type attribute, when present, can't be empty (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchStart < 0 ) throw new IllegalArgumentException( String.format( "<%s> 'start' attribute is not >= 0 (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchLimit < -1 ) throw new IllegalArgumentException( String.format( "<%s> 'limit' attribute is not >= -1 (%s).", node.getName(), getPathToRoot(node) ) ); Map<String,String> attrMap = new HashMap<String,String>(); String searchValue = null; Element selectorNode = node.getChild( "selector", modNS ); if ( selectorNode != null ) { for ( Attribute attr : selectorNode.getAttributes() ) { if ( attr.getNamespace().equals( Namespace.NO_NAMESPACE ) ) { // Blank element values can't be detected as different from absent values (never null). // Forbid "" attributes for consistency. :/ if ( attr.getValue().length() == 0 ) throw new IllegalArgumentException( String.format( "<%s> attributes, when present, can't be empty (%s).", selectorNode.getName(), getPathToRoot(selectorNode) ) ); attrMap.put( attr.getName(), attr.getValue() ); } } searchValue = selectorNode.getTextTrim(); // Never null, but often "". 
if ( searchValue.length() == 0 ) searchValue = null; } LikeFilter searchFilter = new LikeFilter( searchType, attrMap, searchValue ); List<Element> matchedNodes = new ArrayList<Element>( contextNode.getContent( searchFilter ) ); if ( searchReverse ) Collections.reverse( matchedNodes ); if ( searchStart < matchedNodes.size() ) { if ( searchLimit > -1 ) { matchedNodes = matchedNodes.subList( searchStart, Math.max( matchedNodes.size(), searchStart + searchLimit ) ); } else if ( searchStart > 0 ) { matchedNodes = matchedNodes.subList( searchStart, matchedNodes.size() ); } } if ( panic && matchedNodes.isEmpty() ) throw new NoSuchElementException( String.format( "<%s> was set to require results but found none (%s).", node.getName(), getPathToRoot(node) ) ); result = matchedNodes; } else if ( node.getName().equals( "findWithChildLike" ) ) { String searchType = node.getAttributeValue( "type" ); String searchChildType = node.getAttributeValue( "child-type" ); boolean searchReverse = getAttributeBooleanValue( node, "reverse", false ); int searchStart = getAttributeIntValue( node, "start", 0 ); int searchLimit = getAttributeIntValue( node, "limit", -1 ); boolean panic = getAttributeBooleanValue( node, "panic", false ); if ( globalPanic ) panic = true; if ( searchType != null && searchType.length() == 0 ) throw new IllegalArgumentException( String.format( "<%s> type attribute, when present, can't be empty (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchChildType != null && searchChildType.length() == 0 ) throw new IllegalArgumentException( String.format( "<%s> child-type attribute, when present, can't be empty (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchStart < 0 ) throw new IllegalArgumentException( String.format( "<%s> 'start' attribute is not >= 0 (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchLimit < -1 ) throw new IllegalArgumentException( String.format( "<%s> 'limit' attribute is not >= -1 (%s).", node.getName(), 
getPathToRoot(node) ) ); Map<String,String> attrMap = new HashMap<String,String>(); String searchValue = null; Element selectorNode = node.getChild( "selector", modNS ); if ( selectorNode != null ) { for ( Attribute attr : selectorNode.getAttributes() ) { if ( attr.getNamespace().equals( Namespace.NO_NAMESPACE ) ) { // TODO: Forbid "" attributes, because blank value doesn't work? attrMap.put( attr.getName(), attr.getValue() ); } } searchValue = selectorNode.getTextTrim(); // Never null, but often "". if ( searchValue.length() == 0 ) searchValue = null; } LikeFilter searchChildFilter = new LikeFilter( searchChildType, attrMap, searchValue ); WithChildFilter searchFilter = new WithChildFilter( searchType, searchChildFilter ); List<Element> matchedNodes = new ArrayList<Element>( contextNode.getContent( searchFilter ) ); if ( searchReverse ) Collections.reverse( matchedNodes ); if ( searchStart < matchedNodes.size() ) { if ( searchLimit > -1 ) { matchedNodes = matchedNodes.subList( searchStart, Math.max( matchedNodes.size(), searchStart + searchLimit ) ); } else if ( searchStart > 0 ) { matchedNodes = matchedNodes.subList( searchStart, matchedNodes.size() ); } } if ( panic && matchedNodes.isEmpty() ) throw new NoSuchElementException( String.format( "<%s> was set to require results but found none (%s).", node.getName(), getPathToRoot(node) ) ); result = matchedNodes; } else if ( node.getName().equals( "findComposite" ) ) { boolean searchReverse = getAttributeBooleanValue( node, "reverse", false ); int searchStart = getAttributeIntValue( node, "start", 0 ); int searchLimit = getAttributeIntValue( node, "limit", -1 ); boolean panic = getAttributeBooleanValue( node, "panic", false ); if ( globalPanic ) panic = true; if ( searchStart < 0 ) throw new IllegalArgumentException( String.format( "<%s> 'start' attribute is not >= 0 (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchLimit < -1 ) throw new IllegalArgumentException( String.format( "<%s> 'limit' attribute is 
not >= -1 (%s).", node.getName(), getPathToRoot(node) ) ); Element parNode = node.getChild( "par", modNS ); if ( parNode == null ) throw new IllegalArgumentException( String.format( "<%s> requires a <par> tag (%s).", node.getName(), getPathToRoot(node) ) ); List<Element> matchedNodes = handleModPar( contextNode, parNode ); if ( searchReverse ) Collections.reverse( matchedNodes ); if ( searchStart < matchedNodes.size() ) { if ( searchLimit > -1 ) { matchedNodes = matchedNodes.subList( searchStart, Math.max( matchedNodes.size(), searchStart + searchLimit ) ); } else if ( searchStart > 0 ) { matchedNodes = matchedNodes.subList( searchStart, matchedNodes.size() ); } } if ( panic && matchedNodes.isEmpty() ) throw new NoSuchElementException( String.format( "<%s> was set to require results but found none (%s).", node.getName(), getPathToRoot(node) ) ); result = matchedNodes; } } return result; }
protected List<Element> handleModFind( Element contextNode, Element node ) { List<Element> result = null; if ( node.getNamespace().equals( modNS ) ) { if ( node.getName().equals( "findName" ) ) { String searchName = node.getAttributeValue( "name" ); String searchType = node.getAttributeValue( "type" ); boolean searchReverse = getAttributeBooleanValue( node, "reverse", true ); int searchStart = getAttributeIntValue( node, "start", 0 ); int searchLimit = getAttributeIntValue( node, "limit", 1 ); boolean panic = getAttributeBooleanValue( node, "panic", false ); if ( globalPanic ) panic = true; if ( searchName == null || searchName.length() == 0 ) throw new IllegalArgumentException( String.format( "<%s> requires a name attribute (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchType != null && searchType.length() == 0 ) throw new IllegalArgumentException( String.format( "<%s> type attribute, when present, can't be empty (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchStart < 0 ) throw new IllegalArgumentException( String.format( "<%s> 'start' attribute is not >= 0 (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchLimit < -1 ) throw new IllegalArgumentException( String.format( "<%s> 'limit' attribute is not >= -1 (%s).", node.getName(), getPathToRoot(node) ) ); Map<String,String> attrMap = new HashMap<String,String>(); attrMap.put( "name", searchName ); LikeFilter searchFilter = new LikeFilter( searchType, attrMap, null ); List<Element> matchedNodes = new ArrayList<Element>( contextNode.getContent( searchFilter ) ); if ( searchReverse ) Collections.reverse( matchedNodes ); if ( searchStart < matchedNodes.size() ) { if ( searchLimit > -1 ) { matchedNodes = matchedNodes.subList( searchStart, Math.max( matchedNodes.size(), searchStart + searchLimit ) ); } else if ( searchStart > 0 ) { matchedNodes = matchedNodes.subList( searchStart, matchedNodes.size() ); } } else { matchedNodes.clear(); } if ( panic && matchedNodes.isEmpty() ) throw new 
NoSuchElementException( String.format( "<%s> was set to require results but found none (%s).", node.getName(), getPathToRoot(node) ) ); result = matchedNodes; } else if ( node.getName().equals( "findLike" ) ) { String searchType = node.getAttributeValue( "type" ); boolean searchReverse = getAttributeBooleanValue( node, "reverse", false ); int searchStart = getAttributeIntValue( node, "start", 0 ); int searchLimit = getAttributeIntValue( node, "limit", -1 ); boolean panic = getAttributeBooleanValue( node, "panic", false ); if ( globalPanic ) panic = true; if ( searchType != null && searchType.length() == 0 ) throw new IllegalArgumentException( String.format( "<%s> type attribute, when present, can't be empty (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchStart < 0 ) throw new IllegalArgumentException( String.format( "<%s> 'start' attribute is not >= 0 (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchLimit < -1 ) throw new IllegalArgumentException( String.format( "<%s> 'limit' attribute is not >= -1 (%s).", node.getName(), getPathToRoot(node) ) ); Map<String,String> attrMap = new HashMap<String,String>(); String searchValue = null; Element selectorNode = node.getChild( "selector", modNS ); if ( selectorNode != null ) { for ( Attribute attr : selectorNode.getAttributes() ) { if ( attr.getNamespace().equals( Namespace.NO_NAMESPACE ) ) { // Blank element values can't be detected as different from absent values (never null). // Forbid "" attributes for consistency. :/ if ( attr.getValue().length() == 0 ) throw new IllegalArgumentException( String.format( "<%s> attributes, when present, can't be empty (%s).", selectorNode.getName(), getPathToRoot(selectorNode) ) ); attrMap.put( attr.getName(), attr.getValue() ); } } searchValue = selectorNode.getTextTrim(); // Never null, but often "". 
if ( searchValue.length() == 0 ) searchValue = null; } LikeFilter searchFilter = new LikeFilter( searchType, attrMap, searchValue ); List<Element> matchedNodes = new ArrayList<Element>( contextNode.getContent( searchFilter ) ); if ( searchReverse ) Collections.reverse( matchedNodes ); if ( searchStart < matchedNodes.size() ) { if ( searchLimit > -1 ) { matchedNodes = matchedNodes.subList( searchStart, Math.max( matchedNodes.size(), searchStart + searchLimit ) ); } else if ( searchStart > 0 ) { matchedNodes = matchedNodes.subList( searchStart, matchedNodes.size() ); } } else { matchedNodes.clear(); } if ( panic && matchedNodes.isEmpty() ) throw new NoSuchElementException( String.format( "<%s> was set to require results but found none (%s).", node.getName(), getPathToRoot(node) ) ); result = matchedNodes; } else if ( node.getName().equals( "findWithChildLike" ) ) { String searchType = node.getAttributeValue( "type" ); String searchChildType = node.getAttributeValue( "child-type" ); boolean searchReverse = getAttributeBooleanValue( node, "reverse", false ); int searchStart = getAttributeIntValue( node, "start", 0 ); int searchLimit = getAttributeIntValue( node, "limit", -1 ); boolean panic = getAttributeBooleanValue( node, "panic", false ); if ( globalPanic ) panic = true; if ( searchType != null && searchType.length() == 0 ) throw new IllegalArgumentException( String.format( "<%s> type attribute, when present, can't be empty (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchChildType != null && searchChildType.length() == 0 ) throw new IllegalArgumentException( String.format( "<%s> child-type attribute, when present, can't be empty (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchStart < 0 ) throw new IllegalArgumentException( String.format( "<%s> 'start' attribute is not >= 0 (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchLimit < -1 ) throw new IllegalArgumentException( String.format( "<%s> 'limit' attribute is not >= -1 (%s).", 
node.getName(), getPathToRoot(node) ) ); Map<String,String> attrMap = new HashMap<String,String>(); String searchValue = null; Element selectorNode = node.getChild( "selector", modNS ); if ( selectorNode != null ) { for ( Attribute attr : selectorNode.getAttributes() ) { if ( attr.getNamespace().equals( Namespace.NO_NAMESPACE ) ) { // TODO: Forbid "" attributes, because blank value doesn't work? attrMap.put( attr.getName(), attr.getValue() ); } } searchValue = selectorNode.getTextTrim(); // Never null, but often "". if ( searchValue.length() == 0 ) searchValue = null; } LikeFilter searchChildFilter = new LikeFilter( searchChildType, attrMap, searchValue ); WithChildFilter searchFilter = new WithChildFilter( searchType, searchChildFilter ); List<Element> matchedNodes = new ArrayList<Element>( contextNode.getContent( searchFilter ) ); if ( searchReverse ) Collections.reverse( matchedNodes ); if ( searchStart < matchedNodes.size() ) { if ( searchLimit > -1 ) { matchedNodes = matchedNodes.subList( searchStart, Math.max( matchedNodes.size(), searchStart + searchLimit ) ); } else if ( searchStart > 0 ) { matchedNodes = matchedNodes.subList( searchStart, matchedNodes.size() ); } } else { matchedNodes.clear(); } if ( panic && matchedNodes.isEmpty() ) throw new NoSuchElementException( String.format( "<%s> was set to require results but found none (%s).", node.getName(), getPathToRoot(node) ) ); result = matchedNodes; } else if ( node.getName().equals( "findComposite" ) ) { boolean searchReverse = getAttributeBooleanValue( node, "reverse", false ); int searchStart = getAttributeIntValue( node, "start", 0 ); int searchLimit = getAttributeIntValue( node, "limit", -1 ); boolean panic = getAttributeBooleanValue( node, "panic", false ); if ( globalPanic ) panic = true; if ( searchStart < 0 ) throw new IllegalArgumentException( String.format( "<%s> 'start' attribute is not >= 0 (%s).", node.getName(), getPathToRoot(node) ) ); if ( searchLimit < -1 ) throw new 
IllegalArgumentException( String.format( "<%s> 'limit' attribute is not >= -1 (%s).", node.getName(), getPathToRoot(node) ) ); Element parNode = node.getChild( "par", modNS ); if ( parNode == null ) throw new IllegalArgumentException( String.format( "<%s> requires a <par> tag (%s).", node.getName(), getPathToRoot(node) ) ); List<Element> matchedNodes = handleModPar( contextNode, parNode ); if ( searchReverse ) Collections.reverse( matchedNodes ); if ( searchStart < matchedNodes.size() ) { if ( searchLimit > -1 ) { matchedNodes = matchedNodes.subList( searchStart, Math.max( matchedNodes.size(), searchStart + searchLimit ) ); } else if ( searchStart > 0 ) { matchedNodes = matchedNodes.subList( searchStart, matchedNodes.size() ); } } else { matchedNodes.clear(); } if ( panic && matchedNodes.isEmpty() ) throw new NoSuchElementException( String.format( "<%s> was set to require results but found none (%s).", node.getName(), getPathToRoot(node) ) ); result = matchedNodes; } } return result; }
diff --git a/src/org/geometerplus/android/fbreader/FBReader.java b/src/org/geometerplus/android/fbreader/FBReader.java index 346ffb5..6883159 100644 --- a/src/org/geometerplus/android/fbreader/FBReader.java +++ b/src/org/geometerplus/android/fbreader/FBReader.java @@ -1,417 +1,422 @@ /* * Copyright (C) 2009-2010 Geometer Plus <[email protected]> * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA * 02110-1301, USA. 
*/ package org.geometerplus.android.fbreader; import android.app.SearchManager; import android.graphics.Bitmap; import android.os.Bundle; import android.os.Handler; import android.view.View; import android.view.ViewGroup; import android.view.animation.RotateAnimation; import android.widget.*; import org.geometerplus.zlibrary.core.application.ZLApplication; import org.geometerplus.zlibrary.core.image.ZLImage; import org.geometerplus.zlibrary.core.resources.ZLResource; import org.geometerplus.zlibrary.core.view.ZLView; import org.geometerplus.zlibrary.text.view.ZLTextView; import org.geometerplus.zlibrary.ui.android.image.ZLAndroidImageData; import org.geometerplus.zlibrary.ui.android.image.ZLAndroidImageManager; import org.geometerplus.zlibrary.ui.android.library.ZLAndroidActivity; import org.geometerplus.zlibrary.ui.android.R; import org.geometerplus.fbreader.fbreader.ActionCode; import org.geometerplus.fbreader.formats.FormatPlugin; import org.geometerplus.fbreader.formats.PluginCollection; import org.geometerplus.fbreader.library.Author; import org.geometerplus.fbreader.library.Book; public final class FBReader extends ZLAndroidActivity { static FBReader Instance; //private int myFullScreenFlag; private boolean myReadMode; private Book myViewBook; public final ZLResource Resource = ZLResource.resource("fbreader"); boolean isReadMode() { return myReadMode; } private static class TextSearchButtonPanel implements ZLApplication.ButtonPanel { boolean Visible; ControlPanel ControlPanel; public void hide() { Visible = false; if (ControlPanel != null) { ControlPanel.hide(false); } } public void updateStates() { if (ControlPanel != null) { ControlPanel.updateStates(); } } } private static TextSearchButtonPanel myPanel; @Override public void onCreate(Bundle icicle) { EPDView.Instance().setVdsActive(true); super.onCreate(icicle); /* android.telephony.TelephonyManager tele = (android.telephony.TelephonyManager)getSystemService(TELEPHONY_SERVICE); 
System.err.println(tele.getNetworkOperator()); */ Instance = this; /*final ZLAndroidApplication application = ZLAndroidApplication.Instance(); myFullScreenFlag = application.ShowStatusBarOption.getValue() ? 0 : WindowManager.LayoutParams.FLAG_FULLSCREEN; getWindow().setFlags( WindowManager.LayoutParams.FLAG_FULLSCREEN, myFullScreenFlag );*/ if (myPanel == null) { myPanel = new TextSearchButtonPanel(); ZLApplication.Instance().registerButtonPanel(myPanel); } final TextView statusPositionText = (TextView) findViewById(R.id.statusbar_position_text); final TextView bookPositionText = (TextView) findViewById(R.id.book_position_text); final SeekBar bookPositionSlider = (SeekBar) findViewById(R.id.book_position_slider); bookPositionText.setText(""); statusPositionText.setText(""); bookPositionSlider.setProgress(0); bookPositionSlider.setMax(1); bookPositionSlider.setVisibility(View.INVISIBLE); bookPositionSlider.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() { private boolean myInTouch; private void gotoPage(int page) { final ZLView view = ZLApplication.Instance().getCurrentView(); if (view instanceof ZLTextView) { ZLTextView textView = (ZLTextView) view; if (page == 1) { textView.gotoHome(); } else { textView.gotoPage(page); } } } public void onStopTrackingTouch(SeekBar seekBar) { gotoPage(seekBar.getProgress() + 1); updateEpdView(0); myInTouch = false; } public void onStartTrackingTouch(SeekBar seekBar) { myInTouch = true; } public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { if (fromUser) { final int page = progress + 1; final int pagesNumber = seekBar.getMax() + 1; bookPositionText.setText(makePositionText(page, pagesNumber)); if (!myInTouch) { gotoPage(page); updateEpdView(250); } } } }); final TextView bookNoCover = (TextView) findViewById(R.id.book_no_cover_text); bookNoCover.setText(Resource.getResource("noCover").getValue()); } private static String makePositionText(int page, int pagesNumber) { return "" + page + " / 
" + pagesNumber; } @Override public void onStart() { super.onStart(); /*final ZLAndroidApplication application = ZLAndroidApplication.Instance(); final int fullScreenFlag = application.ShowStatusBarOption.getValue() ? 0 : WindowManager.LayoutParams.FLAG_FULLSCREEN; if (fullScreenFlag != myFullScreenFlag) { finish(); startActivity(new Intent(this, this.getClass())); }*/ if (myPanel.ControlPanel == null) { myPanel.ControlPanel = new ControlPanel(this); myPanel.ControlPanel.addButton(ActionCode.FIND_PREVIOUS, false, R.drawable.text_search_previous); myPanel.ControlPanel.addButton(ActionCode.CLEAR_FIND_RESULTS, true, R.drawable.text_search_close); myPanel.ControlPanel.addButton(ActionCode.FIND_NEXT, false, R.drawable.text_search_next); RelativeLayout root = (RelativeLayout)findViewById(R.id.root_view); RelativeLayout.LayoutParams p = new RelativeLayout.LayoutParams( RelativeLayout.LayoutParams.WRAP_CONTENT, RelativeLayout.LayoutParams.WRAP_CONTENT); p.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM); p.addRule(RelativeLayout.CENTER_HORIZONTAL); root.addView(myPanel.ControlPanel, p); } final LinearLayout view = (LinearLayout) findViewById(R.id.epd_layout); EPDView.Instance().bindLayout((ViewGroup) view); EPDView.Instance().setVdsActive(true); EPDView.Instance().updateEpdViewDelay(200); } //private PowerManager.WakeLock myWakeLock; @Override public void onResume() { super.onResume(); if (myPanel.ControlPanel != null) { myPanel.ControlPanel.setVisibility(myPanel.Visible ? View.VISIBLE : View.GONE); } /*if (ZLAndroidApplication.Instance().DontTurnScreenOffOption.getValue()) { myWakeLock = ((PowerManager)getSystemService(POWER_SERVICE)). 
newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, "FBReader"); myWakeLock.acquire(); } else { myWakeLock = null; }*/ myReadMode = true; } @Override public void onPause() { /*if (myWakeLock != null) { myWakeLock.release(); }*/ if (myPanel.ControlPanel != null) { myPanel.Visible = myPanel.ControlPanel.getVisibility() == View.VISIBLE; } myReadMode = false; super.onPause(); } @Override public void onStop() { EPDView.Instance().setVdsActive(false); if (myPanel.ControlPanel != null) { myPanel.ControlPanel.hide(false); myPanel.ControlPanel = null; } super.onStop(); } private final Handler myNotifyApplicationHandler = new Handler() { @Override public void handleMessage(android.os.Message msg) { final boolean singleChange = msg.what == 1; updateEpdView(singleChange ? 0 : 200); }; }; @Override public void notifyApplicationChanges(boolean singleChange) { myNotifyApplicationHandler.sendEmptyMessage(singleChange ? 1 : 0); } private int myCoverWidth; private int myCoverHeight; public void onEpdRepaintFinished() { final org.geometerplus.fbreader.fbreader.FBReader fbreader = (org.geometerplus.fbreader.fbreader.FBReader)ZLApplication.Instance(); final TextView bookTitle = (TextView) findViewById(R.id.book_title); final TextView bookAuthors = (TextView) findViewById(R.id.book_authors); final ImageView bookCover = (ImageView) findViewById(R.id.book_cover); final TextView bookNoCoverText = (TextView) findViewById(R.id.book_no_cover_text); final RelativeLayout bookNoCoverLayout = (RelativeLayout) findViewById(R.id.book_no_cover_layout); if (myCoverWidth == 0) { myCoverWidth = bookCover.getWidth(); myCoverHeight = bookCover.getHeight(); final int viewHeight = myCoverWidth * 4 / 3; if (myCoverHeight > viewHeight) { final int margin = (myCoverHeight - viewHeight) / 2; ViewGroup.MarginLayoutParams params = (ViewGroup.MarginLayoutParams) bookNoCoverLayout.getLayoutParams(); params.topMargin = params.bottomMargin = margin; bookNoCoverLayout.invalidate(); bookNoCoverLayout.requestLayout(); } 
} bookCover.setAnimation(null); + bookCover.setPadding(0, 0, 0, 0); bookNoCoverText.setAnimation(null); if (fbreader.Model != null && fbreader.Model.Book != null) { if (fbreader.Model.Book != myViewBook) { myViewBook = fbreader.Model.Book; bookTitle.setText(myViewBook.getTitle()); int count = 0; final StringBuilder authors = new StringBuilder(); for (Author a: myViewBook.authors()) { if (count++ > 0) { authors.append(", "); } authors.append(a.DisplayName); if (count == 5) { break; } } bookAuthors.setText(authors.toString()); Bitmap coverBitmap = null; final FormatPlugin plugin = PluginCollection.instance().getPlugin(myViewBook.File); if (plugin != null) { final ZLImage image = plugin.readCover(myViewBook); if (image != null) { final ZLAndroidImageManager mgr = (ZLAndroidImageManager) ZLAndroidImageManager.Instance(); ZLAndroidImageData data = mgr.getImageData(image); if (data != null) { coverBitmap = data.getBitmap(2 * myCoverWidth, 2 * myCoverHeight); } } } if (coverBitmap != null) { bookCover.setImageBitmap(coverBitmap); bookCover.setVisibility(View.VISIBLE); bookNoCoverLayout.setVisibility(View.GONE); } else { bookCover.setImageDrawable(null); bookCover.setVisibility(View.GONE); bookNoCoverLayout.setVisibility(View.VISIBLE); } } if (EPDView.Instance().rotateFlag()) { final RotateAnimation anim = new RotateAnimation(90.0f, 90.0f, RotateAnimation.RELATIVE_TO_SELF, 0.5f, RotateAnimation.RELATIVE_TO_SELF, 0.5f); anim.setFillEnabled(true); anim.setFillAfter(true); final View coverView = bookCover.getVisibility() == View.VISIBLE ? 
bookCover : bookNoCoverText; coverView.startAnimation(anim); + if (coverView == bookCover) { + final int padding = (bookCover.getHeight() - bookCover.getWidth()) / 2; + bookCover.setPadding(0, padding, 0, padding); + } } } else { myViewBook = null; bookTitle.setText(""); bookAuthors.setText(""); bookCover.setImageDrawable(null); bookCover.setVisibility(View.VISIBLE); bookNoCoverLayout.setVisibility(View.GONE); } findViewById(R.id.root_view).invalidate(); final TextView bookPositionText = (TextView) findViewById(R.id.book_position_text); final SeekBar bookPositionSlider = (SeekBar) findViewById(R.id.book_position_slider); final ZLView view = fbreader.getCurrentView(); if (view instanceof ZLTextView && ((ZLTextView) view).getModel() != null && ((ZLTextView) view).getModel().getParagraphsNumber() != 0) { ZLTextView textView = (ZLTextView) view; final int page = textView.computeCurrentPage(); final int pagesNumber = textView.computePageNumber(); bookPositionText.setText(makePositionText(page, pagesNumber)); bookPositionSlider.setVisibility(View.VISIBLE); bookPositionSlider.setMax(pagesNumber - 1); bookPositionSlider.setProgress(page - 1); } else { bookPositionText.setText(""); bookPositionSlider.setProgress(0); bookPositionSlider.setMax(1); bookPositionSlider.setVisibility(View.INVISIBLE); } if (SynchronousActivity.Instance != null) { SynchronousActivity.Instance.updateImage(); } } public void updateEpdView(int delay) { updateEpdStatusbar(); if (delay <= 0) { EPDView.Instance().updateEpdView(); } else { EPDView.Instance().updateEpdViewDelay(delay); } } private void updateEpdStatusbar() { final TextView statusPositionText = (TextView) findViewById(R.id.statusbar_position_text); final ZLView view = ZLApplication.Instance().getCurrentView(); if (view instanceof ZLTextView && ((ZLTextView) view).getModel() != null && ((ZLTextView) view).getModel().getParagraphsNumber() != 0) { ZLTextView textView = (ZLTextView) view; final int page = textView.computeCurrentPage(); final 
int pagesNumber = textView.computePageNumber(); statusPositionText.setText(makePositionText(page, pagesNumber)); } else { statusPositionText.setText(""); } } void showTextSearchControls(boolean show) { if (myPanel.ControlPanel != null) { if (show) { myPanel.ControlPanel.show(true); } else { myPanel.ControlPanel.hide(false); } } } protected ZLApplication createApplication(String fileName) { new SQLiteBooksDatabase(); String[] args = (fileName != null) ? new String[] { fileName } : new String[0]; return new org.geometerplus.fbreader.fbreader.FBReader(args); } @Override public boolean onSearchRequested() { if (myPanel.ControlPanel != null) { final boolean visible = myPanel.ControlPanel.getVisibility() == View.VISIBLE; myPanel.ControlPanel.hide(false); SearchManager manager = (SearchManager)getSystemService(SEARCH_SERVICE); manager.setOnCancelListener(new SearchManager.OnCancelListener() { public void onCancel() { if ((myPanel.ControlPanel != null) && visible) { myPanel.ControlPanel.show(false); } } }); } final org.geometerplus.fbreader.fbreader.FBReader fbreader = (org.geometerplus.fbreader.fbreader.FBReader)ZLApplication.Instance(); startSearch(fbreader.TextSearchPatternOption.getValue(), true, null, false); return true; } }
false
true
public void onEpdRepaintFinished() { final org.geometerplus.fbreader.fbreader.FBReader fbreader = (org.geometerplus.fbreader.fbreader.FBReader)ZLApplication.Instance(); final TextView bookTitle = (TextView) findViewById(R.id.book_title); final TextView bookAuthors = (TextView) findViewById(R.id.book_authors); final ImageView bookCover = (ImageView) findViewById(R.id.book_cover); final TextView bookNoCoverText = (TextView) findViewById(R.id.book_no_cover_text); final RelativeLayout bookNoCoverLayout = (RelativeLayout) findViewById(R.id.book_no_cover_layout); if (myCoverWidth == 0) { myCoverWidth = bookCover.getWidth(); myCoverHeight = bookCover.getHeight(); final int viewHeight = myCoverWidth * 4 / 3; if (myCoverHeight > viewHeight) { final int margin = (myCoverHeight - viewHeight) / 2; ViewGroup.MarginLayoutParams params = (ViewGroup.MarginLayoutParams) bookNoCoverLayout.getLayoutParams(); params.topMargin = params.bottomMargin = margin; bookNoCoverLayout.invalidate(); bookNoCoverLayout.requestLayout(); } } bookCover.setAnimation(null); bookNoCoverText.setAnimation(null); if (fbreader.Model != null && fbreader.Model.Book != null) { if (fbreader.Model.Book != myViewBook) { myViewBook = fbreader.Model.Book; bookTitle.setText(myViewBook.getTitle()); int count = 0; final StringBuilder authors = new StringBuilder(); for (Author a: myViewBook.authors()) { if (count++ > 0) { authors.append(", "); } authors.append(a.DisplayName); if (count == 5) { break; } } bookAuthors.setText(authors.toString()); Bitmap coverBitmap = null; final FormatPlugin plugin = PluginCollection.instance().getPlugin(myViewBook.File); if (plugin != null) { final ZLImage image = plugin.readCover(myViewBook); if (image != null) { final ZLAndroidImageManager mgr = (ZLAndroidImageManager) ZLAndroidImageManager.Instance(); ZLAndroidImageData data = mgr.getImageData(image); if (data != null) { coverBitmap = data.getBitmap(2 * myCoverWidth, 2 * myCoverHeight); } } } if (coverBitmap != null) { 
bookCover.setImageBitmap(coverBitmap); bookCover.setVisibility(View.VISIBLE); bookNoCoverLayout.setVisibility(View.GONE); } else { bookCover.setImageDrawable(null); bookCover.setVisibility(View.GONE); bookNoCoverLayout.setVisibility(View.VISIBLE); } } if (EPDView.Instance().rotateFlag()) { final RotateAnimation anim = new RotateAnimation(90.0f, 90.0f, RotateAnimation.RELATIVE_TO_SELF, 0.5f, RotateAnimation.RELATIVE_TO_SELF, 0.5f); anim.setFillEnabled(true); anim.setFillAfter(true); final View coverView = bookCover.getVisibility() == View.VISIBLE ? bookCover : bookNoCoverText; coverView.startAnimation(anim); } } else { myViewBook = null; bookTitle.setText(""); bookAuthors.setText(""); bookCover.setImageDrawable(null); bookCover.setVisibility(View.VISIBLE); bookNoCoverLayout.setVisibility(View.GONE); } findViewById(R.id.root_view).invalidate(); final TextView bookPositionText = (TextView) findViewById(R.id.book_position_text); final SeekBar bookPositionSlider = (SeekBar) findViewById(R.id.book_position_slider); final ZLView view = fbreader.getCurrentView(); if (view instanceof ZLTextView && ((ZLTextView) view).getModel() != null && ((ZLTextView) view).getModel().getParagraphsNumber() != 0) { ZLTextView textView = (ZLTextView) view; final int page = textView.computeCurrentPage(); final int pagesNumber = textView.computePageNumber(); bookPositionText.setText(makePositionText(page, pagesNumber)); bookPositionSlider.setVisibility(View.VISIBLE); bookPositionSlider.setMax(pagesNumber - 1); bookPositionSlider.setProgress(page - 1); } else { bookPositionText.setText(""); bookPositionSlider.setProgress(0); bookPositionSlider.setMax(1); bookPositionSlider.setVisibility(View.INVISIBLE); } if (SynchronousActivity.Instance != null) { SynchronousActivity.Instance.updateImage(); } }
public void onEpdRepaintFinished() { final org.geometerplus.fbreader.fbreader.FBReader fbreader = (org.geometerplus.fbreader.fbreader.FBReader)ZLApplication.Instance(); final TextView bookTitle = (TextView) findViewById(R.id.book_title); final TextView bookAuthors = (TextView) findViewById(R.id.book_authors); final ImageView bookCover = (ImageView) findViewById(R.id.book_cover); final TextView bookNoCoverText = (TextView) findViewById(R.id.book_no_cover_text); final RelativeLayout bookNoCoverLayout = (RelativeLayout) findViewById(R.id.book_no_cover_layout); if (myCoverWidth == 0) { myCoverWidth = bookCover.getWidth(); myCoverHeight = bookCover.getHeight(); final int viewHeight = myCoverWidth * 4 / 3; if (myCoverHeight > viewHeight) { final int margin = (myCoverHeight - viewHeight) / 2; ViewGroup.MarginLayoutParams params = (ViewGroup.MarginLayoutParams) bookNoCoverLayout.getLayoutParams(); params.topMargin = params.bottomMargin = margin; bookNoCoverLayout.invalidate(); bookNoCoverLayout.requestLayout(); } } bookCover.setAnimation(null); bookCover.setPadding(0, 0, 0, 0); bookNoCoverText.setAnimation(null); if (fbreader.Model != null && fbreader.Model.Book != null) { if (fbreader.Model.Book != myViewBook) { myViewBook = fbreader.Model.Book; bookTitle.setText(myViewBook.getTitle()); int count = 0; final StringBuilder authors = new StringBuilder(); for (Author a: myViewBook.authors()) { if (count++ > 0) { authors.append(", "); } authors.append(a.DisplayName); if (count == 5) { break; } } bookAuthors.setText(authors.toString()); Bitmap coverBitmap = null; final FormatPlugin plugin = PluginCollection.instance().getPlugin(myViewBook.File); if (plugin != null) { final ZLImage image = plugin.readCover(myViewBook); if (image != null) { final ZLAndroidImageManager mgr = (ZLAndroidImageManager) ZLAndroidImageManager.Instance(); ZLAndroidImageData data = mgr.getImageData(image); if (data != null) { coverBitmap = data.getBitmap(2 * myCoverWidth, 2 * myCoverHeight); } } } if 
(coverBitmap != null) { bookCover.setImageBitmap(coverBitmap); bookCover.setVisibility(View.VISIBLE); bookNoCoverLayout.setVisibility(View.GONE); } else { bookCover.setImageDrawable(null); bookCover.setVisibility(View.GONE); bookNoCoverLayout.setVisibility(View.VISIBLE); } } if (EPDView.Instance().rotateFlag()) { final RotateAnimation anim = new RotateAnimation(90.0f, 90.0f, RotateAnimation.RELATIVE_TO_SELF, 0.5f, RotateAnimation.RELATIVE_TO_SELF, 0.5f); anim.setFillEnabled(true); anim.setFillAfter(true); final View coverView = bookCover.getVisibility() == View.VISIBLE ? bookCover : bookNoCoverText; coverView.startAnimation(anim); if (coverView == bookCover) { final int padding = (bookCover.getHeight() - bookCover.getWidth()) / 2; bookCover.setPadding(0, padding, 0, padding); } } } else { myViewBook = null; bookTitle.setText(""); bookAuthors.setText(""); bookCover.setImageDrawable(null); bookCover.setVisibility(View.VISIBLE); bookNoCoverLayout.setVisibility(View.GONE); } findViewById(R.id.root_view).invalidate(); final TextView bookPositionText = (TextView) findViewById(R.id.book_position_text); final SeekBar bookPositionSlider = (SeekBar) findViewById(R.id.book_position_slider); final ZLView view = fbreader.getCurrentView(); if (view instanceof ZLTextView && ((ZLTextView) view).getModel() != null && ((ZLTextView) view).getModel().getParagraphsNumber() != 0) { ZLTextView textView = (ZLTextView) view; final int page = textView.computeCurrentPage(); final int pagesNumber = textView.computePageNumber(); bookPositionText.setText(makePositionText(page, pagesNumber)); bookPositionSlider.setVisibility(View.VISIBLE); bookPositionSlider.setMax(pagesNumber - 1); bookPositionSlider.setProgress(page - 1); } else { bookPositionText.setText(""); bookPositionSlider.setProgress(0); bookPositionSlider.setMax(1); bookPositionSlider.setVisibility(View.INVISIBLE); } if (SynchronousActivity.Instance != null) { SynchronousActivity.Instance.updateImage(); } }
diff --git a/src/com/android/browser/BrowserProvider.java b/src/com/android/browser/BrowserProvider.java index 42e22a53..ce156a7c 100644 --- a/src/com/android/browser/BrowserProvider.java +++ b/src/com/android/browser/BrowserProvider.java @@ -1,697 +1,698 @@ /* * Copyright (C) 2006 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.browser; import com.google.android.providers.GoogleSettings.Partner; import java.util.Date; import android.app.ISearchManager; import android.app.SearchManager; import android.content.ComponentName; import android.content.ContentProvider; import android.content.ContentUris; import android.content.ContentResolver; import android.content.ContentValues; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.content.UriMatcher; import android.content.SharedPreferences.Editor; import android.database.AbstractCursor; import android.database.Cursor; import android.database.sqlite.SQLiteOpenHelper; import android.database.sqlite.SQLiteDatabase; import android.net.Uri; import android.os.RemoteException; import android.os.ServiceManager; import android.os.SystemProperties; import android.preference.PreferenceManager; import android.provider.Browser; import android.util.Log; import android.server.search.SearchableInfo; import android.text.util.Regex; public class BrowserProvider extends ContentProvider { private SQLiteOpenHelper mOpenHelper; private 
static final String sDatabaseName = "browser.db"; private static final String TAG = "BrowserProvider"; private static final String ORDER_BY = "visits DESC, date DESC"; private static final String PICASA_URL = "http://picasaweb.google.com/m/" + "viewer?source=androidclient"; private static final String[] TABLE_NAMES = new String[] { "bookmarks", "searches" }; private static final String[] SUGGEST_PROJECTION = new String[] { "_id", "url", "title", "bookmark" }; private static final String SUGGEST_SELECTION = "url LIKE ? OR url LIKE ? OR url LIKE ? OR url LIKE ?"; private String[] SUGGEST_ARGS = new String[4]; // shared suggestion array index, make sure to match COLUMNS private static final int SUGGEST_COLUMN_INTENT_ACTION_ID = 1; private static final int SUGGEST_COLUMN_INTENT_DATA_ID = 2; private static final int SUGGEST_COLUMN_TEXT_1_ID = 3; private static final int SUGGEST_COLUMN_TEXT_2_ID = 4; private static final int SUGGEST_COLUMN_ICON_1_ID = 5; private static final int SUGGEST_COLUMN_ICON_2_ID = 6; private static final int SUGGEST_COLUMN_QUERY_ID = 7; // shared suggestion columns private static final String[] COLUMNS = new String[] { "_id", SearchManager.SUGGEST_COLUMN_INTENT_ACTION, SearchManager.SUGGEST_COLUMN_INTENT_DATA, SearchManager.SUGGEST_COLUMN_TEXT_1, SearchManager.SUGGEST_COLUMN_TEXT_2, SearchManager.SUGGEST_COLUMN_ICON_1, SearchManager.SUGGEST_COLUMN_ICON_2, SearchManager.SUGGEST_COLUMN_QUERY}; private static final int MAX_SUGGESTION_SHORT_ENTRIES = 3; private static final int MAX_SUGGESTION_LONG_ENTRIES = 6; // make sure that these match the index of TABLE_NAMES private static final int URI_MATCH_BOOKMARKS = 0; private static final int URI_MATCH_SEARCHES = 1; // (id % 10) should match the table name index private static final int URI_MATCH_BOOKMARKS_ID = 10; private static final int URI_MATCH_SEARCHES_ID = 11; // private static final int URI_MATCH_SUGGEST = 20; private static final UriMatcher URI_MATCHER; static { URI_MATCHER = new 
UriMatcher(UriMatcher.NO_MATCH); URI_MATCHER.addURI("browser", TABLE_NAMES[URI_MATCH_BOOKMARKS], URI_MATCH_BOOKMARKS); URI_MATCHER.addURI("browser", TABLE_NAMES[URI_MATCH_BOOKMARKS] + "/#", URI_MATCH_BOOKMARKS_ID); URI_MATCHER.addURI("browser", TABLE_NAMES[URI_MATCH_SEARCHES], URI_MATCH_SEARCHES); URI_MATCHER.addURI("browser", TABLE_NAMES[URI_MATCH_SEARCHES] + "/#", URI_MATCH_SEARCHES_ID); URI_MATCHER.addURI("browser", SearchManager.SUGGEST_URI_PATH_QUERY, URI_MATCH_SUGGEST); } // 1 -> 2 add cache table // 2 -> 3 update history table // 3 -> 4 add passwords table // 4 -> 5 add settings table // 5 -> 6 ? // 6 -> 7 ? // 7 -> 8 drop proxy table // 8 -> 9 drop settings table // 9 -> 10 add form_urls and form_data // 10 -> 11 add searches table // 11 -> 12 modify cache table // 12 -> 13 modify cache table // 13 -> 14 correspond with Google Bookmarks schema // 14 -> 15 move couple of tables to either browser private database or webview database // 15 -> 17 Set it up for the SearchManager // 17 -> 18 Added favicon in bookmarks table for Home shortcuts // 18 -> 19 Remove labels table private static final int DATABASE_VERSION = 19; public BrowserProvider() { } private static CharSequence replaceSystemPropertyInString(Context context, CharSequence srcString) { StringBuffer sb = new StringBuffer(); int lastCharLoc = 0; final String client_id = Partner.getString(context.getContentResolver(), Partner.CLIENT_ID); for (int i = 0; i < srcString.length(); ++i) { char c = srcString.charAt(i); if (c == '{') { sb.append(srcString.subSequence(lastCharLoc, i)); lastCharLoc = i; inner: for (int j = i; j < srcString.length(); ++j) { char k = srcString.charAt(j); if (k == '}') { String propertyKeyValue = srcString.subSequence(i + 1, j).toString(); if (propertyKeyValue.equals("CLIENT_ID")) { sb.append(client_id); } else { sb.append("unknown"); } lastCharLoc = j + 1; i = j; break inner; } } } } if (srcString.length() - lastCharLoc > 0) { // Put on the tail, if there is one 
sb.append(srcString.subSequence(lastCharLoc, srcString.length())); } return sb; } private static class DatabaseHelper extends SQLiteOpenHelper { private Context mContext; public DatabaseHelper(Context context) { super(context, sDatabaseName, null, DATABASE_VERSION); mContext = context; } @Override public void onCreate(SQLiteDatabase db) { db.execSQL("CREATE TABLE bookmarks (" + "_id INTEGER PRIMARY KEY," + "title TEXT," + "url TEXT," + "visits INTEGER," + "date LONG," + "created LONG," + "description TEXT," + "bookmark INTEGER," + "favicon BLOB DEFAULT NULL" + ");"); final CharSequence[] bookmarks = mContext.getResources() .getTextArray(R.array.bookmarks); int size = bookmarks.length; try { for (int i = 0; i < size; i = i + 2) { CharSequence bookmarkDestination = replaceSystemPropertyInString(mContext, bookmarks[i + 1]); db.execSQL("INSERT INTO bookmarks (title, url, visits, " + "date, created, bookmark)" + " VALUES('" + bookmarks[i] + "', '" + bookmarkDestination + "', 0, 0, 0, 1);"); } } catch (ArrayIndexOutOfBoundsException e) { } db.execSQL("CREATE TABLE searches (" + "_id INTEGER PRIMARY KEY," + "search TEXT," + "date LONG" + ");"); } @Override public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) { Log.w(TAG, "Upgrading database from version " + oldVersion + " to " + newVersion + ", which will destroy all old data"); if (oldVersion == 18) { db.execSQL("DROP TABLE IF EXISTS labels"); } else { db.execSQL("DROP TABLE IF EXISTS bookmarks"); db.execSQL("DROP TABLE IF EXISTS searches"); onCreate(db); } } } @Override public boolean onCreate() { final Context context = getContext(); mOpenHelper = new DatabaseHelper(context); // we added "picasa web album" into default bookmarks for version 19. // To avoid erasing the bookmark table, we added it explicitly for // version 18 and 19 as in the other cases, we will erase the table. 
if (DATABASE_VERSION == 18 || DATABASE_VERSION == 19) { SharedPreferences p = PreferenceManager .getDefaultSharedPreferences(context); boolean fix = p.getBoolean("fix_picasa", true); if (fix) { fixPicasaBookmark(); Editor ed = p.edit(); ed.putBoolean("fix_picasa", false); ed.commit(); } } return true; } private void fixPicasaBookmark() { SQLiteDatabase db = mOpenHelper.getWritableDatabase(); Cursor cursor = db.rawQuery("SELECT _id FROM bookmarks WHERE " + "bookmark = 1 AND url = ?", new String[] { PICASA_URL }); try { if (!cursor.moveToFirst()) { // set "created" so that it will be on the top of the list db.execSQL("INSERT INTO bookmarks (title, url, visits, " + "date, created, bookmark)" + " VALUES('" + getContext().getString(R.string.picasa) + "', '" + PICASA_URL + "', 0, 0, " + new Date().getTime() + ", 1);"); } } finally { if (cursor != null) { cursor.close(); } } } /* * Subclass AbstractCursor so we can combine multiple Cursors and add * "Google Search". * Here are the rules. * 1. We only have MAX_SUGGESTION_LONG_ENTRIES in the list plus * "Google Search"; * 2. If bookmark/history entries are less than * (MAX_SUGGESTION_SHORT_ENTRIES -1), we include Google suggest. */ private class MySuggestionCursor extends AbstractCursor { private Cursor mHistoryCursor; private Cursor mSuggestCursor; private int mHistoryCount; private int mSuggestionCount; private boolean mBeyondCursor; private String mString; public MySuggestionCursor(Cursor hc, Cursor sc, String string) { mHistoryCursor = hc; mSuggestCursor = sc; mHistoryCount = hc.getCount(); mSuggestionCount = sc != null ? 
sc.getCount() : 0; if (mSuggestionCount > (MAX_SUGGESTION_LONG_ENTRIES - mHistoryCount)) { mSuggestionCount = MAX_SUGGESTION_LONG_ENTRIES - mHistoryCount; } mString = string; mBeyondCursor = false; } @Override public boolean onMove(int oldPosition, int newPosition) { if (mHistoryCursor == null) { return false; } if (mHistoryCount > newPosition) { mHistoryCursor.moveToPosition(newPosition); mBeyondCursor = false; } else if (mHistoryCount + mSuggestionCount > newPosition) { mSuggestCursor.moveToPosition(newPosition - mHistoryCount); mBeyondCursor = false; } else { mBeyondCursor = true; } return true; } @Override public int getCount() { if (mString.length() > 0) { return mHistoryCount + mSuggestionCount + 1; } else { return mHistoryCount + mSuggestionCount; } } @Override public String[] getColumnNames() { return COLUMNS; } @Override public String getString(int columnIndex) { if ((mPos != -1 && mHistoryCursor != null)) { switch(columnIndex) { case SUGGEST_COLUMN_INTENT_ACTION_ID: if (mHistoryCount > mPos) { return Intent.ACTION_VIEW; } else { return Intent.ACTION_SEARCH; } case SUGGEST_COLUMN_INTENT_DATA_ID: if (mHistoryCount > mPos) { return mHistoryCursor.getString(1); } else { return null; } case SUGGEST_COLUMN_TEXT_1_ID: if (mHistoryCount > mPos) { return mHistoryCursor.getString(1); } else if (!mBeyondCursor) { return mSuggestCursor.getString(1); } else { return mString; } case SUGGEST_COLUMN_TEXT_2_ID: if (mHistoryCount > mPos) { return mHistoryCursor.getString(2); } else if (!mBeyondCursor) { return mSuggestCursor.getString(2); } else { return getContext().getString(R.string.search_google); } case SUGGEST_COLUMN_ICON_1_ID: if (mHistoryCount > mPos) { if (mHistoryCursor.getInt(3) == 1) { return new Integer( R.drawable.ic_search_category_bookmark) .toString(); } else { return new Integer( R.drawable.ic_search_category_history) .toString(); } } else { return new Integer( R.drawable.ic_search_category_suggest) .toString(); } case SUGGEST_COLUMN_ICON_2_ID: return new 
String("0"); case SUGGEST_COLUMN_QUERY_ID: if (mHistoryCount > mPos) { return null; } else if (!mBeyondCursor) { return mSuggestCursor.getString(3); } else { return mString; } } } return null; } @Override public double getDouble(int column) { throw new UnsupportedOperationException(); } @Override public float getFloat(int column) { throw new UnsupportedOperationException(); } @Override public int getInt(int column) { throw new UnsupportedOperationException(); } @Override public long getLong(int column) { if ((mPos != -1) && column == 0) { return mPos; // use row# as the _Id } throw new UnsupportedOperationException(); } @Override public short getShort(int column) { throw new UnsupportedOperationException(); } @Override public boolean isNull(int column) { throw new UnsupportedOperationException(); } // TODO Temporary change, finalize after jq's changes go in public void deactivate() { if (mHistoryCursor != null) { mHistoryCursor.deactivate(); } if (mSuggestCursor != null) { mSuggestCursor.deactivate(); } super.deactivate(); } public boolean requery() { return (mHistoryCursor != null ? mHistoryCursor.requery() : false) | (mSuggestCursor != null ? 
mSuggestCursor.requery() : false); } // TODO Temporary change, finalize after jq's changes go in public void close() { super.close(); if (mHistoryCursor != null) { mHistoryCursor.close(); mHistoryCursor = null; } if (mSuggestCursor != null) { mSuggestCursor.close(); mSuggestCursor = null; } } } @Override public Cursor query(Uri url, String[] projectionIn, String selection, String[] selectionArgs, String sortOrder) throws IllegalStateException { SQLiteDatabase db = mOpenHelper.getReadableDatabase(); int match = URI_MATCHER.match(url); if (match == -1) { throw new IllegalArgumentException("Unknown URL"); } if (match == URI_MATCH_SUGGEST) { String suggestSelection; String [] myArgs; if (selectionArgs[0] == null || selectionArgs[0].equals("")) { suggestSelection = null; myArgs = null; } else { String like = selectionArgs[0] + "%"; - if (selectionArgs[0].startsWith("http")) { + if (selectionArgs[0].startsWith("http") + || selectionArgs[0].startsWith("file")) { myArgs = new String[1]; myArgs[0] = like; suggestSelection = selection; } else { SUGGEST_ARGS[0] = "http://" + like; SUGGEST_ARGS[1] = "http://www." + like; SUGGEST_ARGS[2] = "https://" + like; SUGGEST_ARGS[3] = "https://www." 
+ like; myArgs = SUGGEST_ARGS; suggestSelection = SUGGEST_SELECTION; } } Cursor c = db.query(TABLE_NAMES[URI_MATCH_BOOKMARKS], SUGGEST_PROJECTION, suggestSelection, myArgs, null, null, ORDER_BY, (new Integer(MAX_SUGGESTION_LONG_ENTRIES)).toString()); if (Regex.WEB_URL_PATTERN.matcher(selectionArgs[0]).matches()) { return new MySuggestionCursor(c, null, ""); } else { // get Google suggest if there is still space in the list if (myArgs != null && myArgs.length > 1 && c.getCount() < (MAX_SUGGESTION_SHORT_ENTRIES - 1)) { ISearchManager sm = ISearchManager.Stub .asInterface(ServiceManager .getService(Context.SEARCH_SERVICE)); SearchableInfo si = null; try { // use the global search to get Google suggest provider si = sm.getSearchableInfo(new ComponentName( getContext(), "com.android.browser"), true); // similar to the getSuggestions() in SearchDialog.java StringBuilder uriStr = new StringBuilder("content://"); uriStr.append(si.getSuggestAuthority()); // if content path provided, insert it now final String contentPath = si.getSuggestPath(); if (contentPath != null) { uriStr.append('/'); uriStr.append(contentPath); } // append standard suggestion query path uriStr.append('/' + SearchManager.SUGGEST_URI_PATH_QUERY); // inject query, either as selection args or inline String[] selArgs = null; if (si.getSuggestSelection() != null) { selArgs = new String[] {selectionArgs[0]}; } else { uriStr.append('/'); uriStr.append(Uri.encode(selectionArgs[0])); } // finally, make the query Cursor sc = getContext().getContentResolver().query( Uri.parse(uriStr.toString()), null, si.getSuggestSelection(), selArgs, null); return new MySuggestionCursor(c, sc, selectionArgs[0]); } catch (RemoteException e) { } } return new MySuggestionCursor(c, null, selectionArgs[0]); } } String[] projection = null; if (projectionIn != null && projectionIn.length > 0) { projection = new String[projectionIn.length + 1]; System.arraycopy(projectionIn, 0, projection, 0, projectionIn.length); 
projection[projectionIn.length] = "_id AS _id"; } StringBuilder whereClause = new StringBuilder(256); if (match == URI_MATCH_BOOKMARKS_ID || match == URI_MATCH_SEARCHES_ID) { whereClause.append("(_id = ").append(url.getPathSegments().get(1)) .append(")"); } // Tack on the user's selection, if present if (selection != null && selection.length() > 0) { if (whereClause.length() > 0) { whereClause.append(" AND "); } whereClause.append('('); whereClause.append(selection); whereClause.append(')'); } Cursor c = db.query(TABLE_NAMES[match % 10], projection, whereClause.toString(), selectionArgs, null, null, sortOrder, null); c.setNotificationUri(getContext().getContentResolver(), url); return c; } @Override public String getType(Uri url) { int match = URI_MATCHER.match(url); switch (match) { case URI_MATCH_BOOKMARKS: return "vnd.android.cursor.dir/bookmark"; case URI_MATCH_BOOKMARKS_ID: return "vnd.android.cursor.item/bookmark"; case URI_MATCH_SEARCHES: return "vnd.android.cursor.dir/searches"; case URI_MATCH_SEARCHES_ID: return "vnd.android.cursor.item/searches"; case URI_MATCH_SUGGEST: return SearchManager.SUGGEST_MIME_TYPE; default: throw new IllegalArgumentException("Unknown URL"); } } @Override public Uri insert(Uri url, ContentValues initialValues) { SQLiteDatabase db = mOpenHelper.getWritableDatabase(); int match = URI_MATCHER.match(url); Uri uri = null; switch (match) { case URI_MATCH_BOOKMARKS: { // Insert into the bookmarks table long rowID = db.insert(TABLE_NAMES[URI_MATCH_BOOKMARKS], "url", initialValues); if (rowID > 0) { uri = ContentUris.withAppendedId(Browser.BOOKMARKS_URI, rowID); } break; } case URI_MATCH_SEARCHES: { // Insert into the searches table long rowID = db.insert(TABLE_NAMES[URI_MATCH_SEARCHES], "url", initialValues); if (rowID > 0) { uri = ContentUris.withAppendedId(Browser.SEARCHES_URI, rowID); } break; } default: throw new IllegalArgumentException("Unknown URL"); } if (uri == null) { throw new IllegalArgumentException("Unknown URL"); } 
getContext().getContentResolver().notifyChange(uri, null); return uri; } @Override public int delete(Uri url, String where, String[] whereArgs) { SQLiteDatabase db = mOpenHelper.getWritableDatabase(); int match = URI_MATCHER.match(url); if (match == -1 || match == URI_MATCH_SUGGEST) { throw new IllegalArgumentException("Unknown URL"); } if (match == URI_MATCH_BOOKMARKS_ID || match == URI_MATCH_SEARCHES_ID) { StringBuilder sb = new StringBuilder(); if (where != null && where.length() > 0) { sb.append("( "); sb.append(where); sb.append(" ) AND "); } sb.append("_id = "); sb.append(url.getPathSegments().get(1)); where = sb.toString(); } int count = db.delete(TABLE_NAMES[match % 10], where, whereArgs); getContext().getContentResolver().notifyChange(url, null); return count; } @Override public int update(Uri url, ContentValues values, String where, String[] whereArgs) { SQLiteDatabase db = mOpenHelper.getWritableDatabase(); int match = URI_MATCHER.match(url); if (match == -1 || match == URI_MATCH_SUGGEST) { throw new IllegalArgumentException("Unknown URL"); } if (match == URI_MATCH_BOOKMARKS_ID || match == URI_MATCH_SEARCHES_ID) { StringBuilder sb = new StringBuilder(); if (where != null && where.length() > 0) { sb.append("( "); sb.append(where); sb.append(" ) AND "); } sb.append("_id = "); sb.append(url.getPathSegments().get(1)); where = sb.toString(); } int ret = db.update(TABLE_NAMES[match % 10], values, where, whereArgs); getContext().getContentResolver().notifyChange(url, null); return ret; } }
true
true
public Cursor query(Uri url, String[] projectionIn, String selection, String[] selectionArgs, String sortOrder) throws IllegalStateException { SQLiteDatabase db = mOpenHelper.getReadableDatabase(); int match = URI_MATCHER.match(url); if (match == -1) { throw new IllegalArgumentException("Unknown URL"); } if (match == URI_MATCH_SUGGEST) { String suggestSelection; String [] myArgs; if (selectionArgs[0] == null || selectionArgs[0].equals("")) { suggestSelection = null; myArgs = null; } else { String like = selectionArgs[0] + "%"; if (selectionArgs[0].startsWith("http")) { myArgs = new String[1]; myArgs[0] = like; suggestSelection = selection; } else { SUGGEST_ARGS[0] = "http://" + like; SUGGEST_ARGS[1] = "http://www." + like; SUGGEST_ARGS[2] = "https://" + like; SUGGEST_ARGS[3] = "https://www." + like; myArgs = SUGGEST_ARGS; suggestSelection = SUGGEST_SELECTION; } } Cursor c = db.query(TABLE_NAMES[URI_MATCH_BOOKMARKS], SUGGEST_PROJECTION, suggestSelection, myArgs, null, null, ORDER_BY, (new Integer(MAX_SUGGESTION_LONG_ENTRIES)).toString()); if (Regex.WEB_URL_PATTERN.matcher(selectionArgs[0]).matches()) { return new MySuggestionCursor(c, null, ""); } else { // get Google suggest if there is still space in the list if (myArgs != null && myArgs.length > 1 && c.getCount() < (MAX_SUGGESTION_SHORT_ENTRIES - 1)) { ISearchManager sm = ISearchManager.Stub .asInterface(ServiceManager .getService(Context.SEARCH_SERVICE)); SearchableInfo si = null; try { // use the global search to get Google suggest provider si = sm.getSearchableInfo(new ComponentName( getContext(), "com.android.browser"), true); // similar to the getSuggestions() in SearchDialog.java StringBuilder uriStr = new StringBuilder("content://"); uriStr.append(si.getSuggestAuthority()); // if content path provided, insert it now final String contentPath = si.getSuggestPath(); if (contentPath != null) { uriStr.append('/'); uriStr.append(contentPath); } // append standard suggestion query path uriStr.append('/' + 
SearchManager.SUGGEST_URI_PATH_QUERY); // inject query, either as selection args or inline String[] selArgs = null; if (si.getSuggestSelection() != null) { selArgs = new String[] {selectionArgs[0]}; } else { uriStr.append('/'); uriStr.append(Uri.encode(selectionArgs[0])); } // finally, make the query Cursor sc = getContext().getContentResolver().query( Uri.parse(uriStr.toString()), null, si.getSuggestSelection(), selArgs, null); return new MySuggestionCursor(c, sc, selectionArgs[0]); } catch (RemoteException e) { } } return new MySuggestionCursor(c, null, selectionArgs[0]); } } String[] projection = null; if (projectionIn != null && projectionIn.length > 0) { projection = new String[projectionIn.length + 1]; System.arraycopy(projectionIn, 0, projection, 0, projectionIn.length); projection[projectionIn.length] = "_id AS _id"; } StringBuilder whereClause = new StringBuilder(256); if (match == URI_MATCH_BOOKMARKS_ID || match == URI_MATCH_SEARCHES_ID) { whereClause.append("(_id = ").append(url.getPathSegments().get(1)) .append(")"); } // Tack on the user's selection, if present if (selection != null && selection.length() > 0) { if (whereClause.length() > 0) { whereClause.append(" AND "); } whereClause.append('('); whereClause.append(selection); whereClause.append(')'); } Cursor c = db.query(TABLE_NAMES[match % 10], projection, whereClause.toString(), selectionArgs, null, null, sortOrder, null); c.setNotificationUri(getContext().getContentResolver(), url); return c; }
public Cursor query(Uri url, String[] projectionIn, String selection, String[] selectionArgs, String sortOrder) throws IllegalStateException { SQLiteDatabase db = mOpenHelper.getReadableDatabase(); int match = URI_MATCHER.match(url); if (match == -1) { throw new IllegalArgumentException("Unknown URL"); } if (match == URI_MATCH_SUGGEST) { String suggestSelection; String [] myArgs; if (selectionArgs[0] == null || selectionArgs[0].equals("")) { suggestSelection = null; myArgs = null; } else { String like = selectionArgs[0] + "%"; if (selectionArgs[0].startsWith("http") || selectionArgs[0].startsWith("file")) { myArgs = new String[1]; myArgs[0] = like; suggestSelection = selection; } else { SUGGEST_ARGS[0] = "http://" + like; SUGGEST_ARGS[1] = "http://www." + like; SUGGEST_ARGS[2] = "https://" + like; SUGGEST_ARGS[3] = "https://www." + like; myArgs = SUGGEST_ARGS; suggestSelection = SUGGEST_SELECTION; } } Cursor c = db.query(TABLE_NAMES[URI_MATCH_BOOKMARKS], SUGGEST_PROJECTION, suggestSelection, myArgs, null, null, ORDER_BY, (new Integer(MAX_SUGGESTION_LONG_ENTRIES)).toString()); if (Regex.WEB_URL_PATTERN.matcher(selectionArgs[0]).matches()) { return new MySuggestionCursor(c, null, ""); } else { // get Google suggest if there is still space in the list if (myArgs != null && myArgs.length > 1 && c.getCount() < (MAX_SUGGESTION_SHORT_ENTRIES - 1)) { ISearchManager sm = ISearchManager.Stub .asInterface(ServiceManager .getService(Context.SEARCH_SERVICE)); SearchableInfo si = null; try { // use the global search to get Google suggest provider si = sm.getSearchableInfo(new ComponentName( getContext(), "com.android.browser"), true); // similar to the getSuggestions() in SearchDialog.java StringBuilder uriStr = new StringBuilder("content://"); uriStr.append(si.getSuggestAuthority()); // if content path provided, insert it now final String contentPath = si.getSuggestPath(); if (contentPath != null) { uriStr.append('/'); uriStr.append(contentPath); } // append standard suggestion 
query path uriStr.append('/' + SearchManager.SUGGEST_URI_PATH_QUERY); // inject query, either as selection args or inline String[] selArgs = null; if (si.getSuggestSelection() != null) { selArgs = new String[] {selectionArgs[0]}; } else { uriStr.append('/'); uriStr.append(Uri.encode(selectionArgs[0])); } // finally, make the query Cursor sc = getContext().getContentResolver().query( Uri.parse(uriStr.toString()), null, si.getSuggestSelection(), selArgs, null); return new MySuggestionCursor(c, sc, selectionArgs[0]); } catch (RemoteException e) { } } return new MySuggestionCursor(c, null, selectionArgs[0]); } } String[] projection = null; if (projectionIn != null && projectionIn.length > 0) { projection = new String[projectionIn.length + 1]; System.arraycopy(projectionIn, 0, projection, 0, projectionIn.length); projection[projectionIn.length] = "_id AS _id"; } StringBuilder whereClause = new StringBuilder(256); if (match == URI_MATCH_BOOKMARKS_ID || match == URI_MATCH_SEARCHES_ID) { whereClause.append("(_id = ").append(url.getPathSegments().get(1)) .append(")"); } // Tack on the user's selection, if present if (selection != null && selection.length() > 0) { if (whereClause.length() > 0) { whereClause.append(" AND "); } whereClause.append('('); whereClause.append(selection); whereClause.append(')'); } Cursor c = db.query(TABLE_NAMES[match % 10], projection, whereClause.toString(), selectionArgs, null, null, sortOrder, null); c.setNotificationUri(getContext().getContentResolver(), url); return c; }
diff --git a/src/com/worxforus/net/SyncTableManager.java b/src/com/worxforus/net/SyncTableManager.java index e79579b..2fe5bb1 100644 --- a/src/com/worxforus/net/SyncTableManager.java +++ b/src/com/worxforus/net/SyncTableManager.java @@ -1,167 +1,168 @@ package com.worxforus.net; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.concurrent.TimeUnit; import org.apache.http.NameValuePair; import android.content.Context; import android.util.Log; import com.worxforus.Pool; import com.worxforus.Result; import com.worxforus.SyncEntry; import com.worxforus.Utils; import com.worxforus.db.TableInterface; import com.worxforus.db.TableManager; import com.worxforus.db.TableSyncDb; import com.worxforus.json.JSONExceptionWrapper; import com.worxforus.net.NetAuthentication.NetAuthenticationHelper; /** * This class performs the actual synching with the network * @author sbossen * */ public class SyncTableManager { //Sync Identifiers public static final String ITEM_LIMIT_PER_PAGE = "item_limit_per_page"; //number of items to send for a given page public static final String ITEMS_TOTAL = "items_total"; //number of items found for given request (not including paging) public static final String SELECTED_PAGE = "selected_page"; //the current page of items to retrieve based on the number of items to send on a given page public static final String ITEMS_SENT = "items_sent"; //The number of items actually sent for this request - can be equal to or less that the page item limit public static final String ITEMS = "items" ; //The items requested will be stored under this JSON header public static final String TO_DATETIME = "to_datetime"; //Calculated on server, the datetime that the start of request was received - sent back to the device for later synchronization requests public static final String FROM_DATETIME = "from_datetime"; //The datetime that the device previously has data for. Will be blank if never synchronized. 
public static final int SYNC_TTL_SECS = 86400; //86400 sec=24 hours - This is how long we need to have from the last sync for recommending a sync public <T> Result addToDatabaseHelper(Context c, ArrayList<T> objects, String dbName, TableInterface<T> table) { Log.i(this.getClass().getName(), "Adding retrieved network objects to:"+table.getTableName()); TableManager.acquireConnection(c, dbName, table); Result r = table.insertOrUpdateArrayList(objects); TableManager.releaseConnection(table); return r; } public <T> Result handleParsedObjectsHelper(SyncTransactionInfo sync, Context c, String dbName, TableInterface<T> table, SyncInterface<T> syncObject, Pool<T> pool) { Log.i(this.getClass().getName(), "Parsing network "+syncObject.getClass().getName()+" objects"); Result r = syncObject.parseJSONtoArrayList(sync.jsonItemArray, pool); ArrayList<T> parseObjects = (ArrayList<T>) r.object; //check that retrieve items equals what we found if (parseObjects.size() != sync.retrievedItems) { r.success = false; r.error = "Retrieve "+sync.getClass().getName()+" items "+parseObjects.size()+" did not match expected value: "+sync.retrievedItems; Log.e(this.getClass().getName(), r.error); return r; } //store data into database r.add_results_if_error(addToDatabaseHelper(c, parseObjects, dbName, table), "Could not add objects to database"); //once objects have been added to the database, we can release all objects back to the pool pool.freeAll(parseObjects); return r; } public <T> Result handleSyncTableDownload(Context c, String host, String dbName, TableInterface<T> table, SyncInterface<T> syncObject, int limitPerPage, String lastSync) { Log.i(this.getClass().getName(), "Performing sync network operation for:"+table.getTableName()); //create object pool to pass around Pool<T> pool = new Pool<T>(syncObject, limitPerPage); Result r = new Result(); //make net call String url = syncObject.getDownloadURL(host); //For the first call we don't know what the toDate is because we get that from the 
server List<NameValuePair> params = syncObject.getDownloadParams(0, limitPerPage, lastSync, ""); NetResult netResult; if (syncObject.requireAuthOnDownload()) { netResult = AuthNetHandler.handleAuthPostWithRetry(url, params , NetHandler.NETWORK_DEFAULT_RETRY_ATTEMPTS); } else { netResult = NetHandler.handlePostWithRetry(url, params , NetHandler.NETWORK_DEFAULT_RETRY_ATTEMPTS); } NetHandler.handleGenericJsonResponseHelper(netResult, this.getClass().getName()); //get json array r.success = netResult.net_success; SyncTransactionInfo syncInfo = new SyncTransactionInfo(); if (netResult.net_success) { try { syncInfo.loadFirstTransactionDetails(netResult); Result handleResult = handleParsedObjectsHelper(syncInfo, c, dbName, table, syncObject, pool); r.add_results_if_error(handleResult, handleResult.error); } catch (JSONExceptionWrapper e) { r.error += "Could not parse JSON "+syncObject.getClass().getName()+" info:"+e.getMessage(); r.success = false; Log.e(this.getClass().getName(), e.getMessage()); } //close first network connection netResult.closeNetResult(); //process until all items are collected for (int cur_page = 1; cur_page < syncInfo.numPages; cur_page++) { if(netResult.net_success) { //now we know what the toDate should be so send that along params = syncObject.getDownloadParams(cur_page, syncInfo.itemLimitPerPage, lastSync, syncInfo.toDatetime); if (syncObject.requireAuthOnDownload()) { netResult = AuthNetHandler.handleAuthPostWithRetry(url, params , NetHandler.NETWORK_DEFAULT_RETRY_ATTEMPTS); } else { netResult = NetHandler.handlePostWithRetry(url, params , NetHandler.NETWORK_DEFAULT_RETRY_ATTEMPTS); } NetHandler.handleGenericJsonResponseHelper(netResult, this.getClass().getName()); if (netResult.net_success) { try { syncInfo.loadTransactionDetails(netResult); Result handleResult = handleParsedObjectsHelper(syncInfo, c, dbName, table, syncObject, pool); r.add_results_if_error(handleResult, handleResult.error); } catch (JSONExceptionWrapper e) { r.error += "Could 
not parse JSON "+syncObject.getClass().getName()+" info:"+e.getMessage(); r.success = false; Log.e(this.getClass().getName(), e.getMessage()); } } else { //we got a network error, because r.success was true, but something went wrong r.success = false; r.add_error(netResult.getLogEntry(), false); } } else { //we got a network error, because r.success was true, but something went wrong r.success = false; r.add_error(netResult.getLogEntry(), false); } } //update synchronized date if no errors occurred if (r.success) { SyncEntry se= TableManager.getTableSyncInfo(c, dbName, table.getTableName()); se.setDownloadDate(syncInfo.toDatetime); r.add_results_if_error(TableManager.setTableSyncInfo(c, dbName, se), "Could not update table ["+table.getTableName()+"] with latest sync date of: "+syncInfo.toDatetime); } } else { r.technical_error = netResult.getLogEntry(); + r.error = "Could not reach page for ["+table.getTableName()+"] because of a network error."; r.success = false; } Log.i(this.getClass().getName(), "Finished sync network operation, result:"+r.toString()); return r; } public boolean checkIfSyncNeeded(Context c, String dbName, String tableName, TableSyncDb syncDb) { Result r = new Result(); Date curDate = Utils.getCurrentDatetime(); TableManager.acquireConnection(c, dbName, syncDb); SyncEntry info = syncDb.getTableSyncData(tableName); Date syncDate = Utils.getDatetimeObject(info.getDownloadDate()); TableManager.releaseConnection(syncDb); if (Utils.getDateDiff(syncDate, curDate, TimeUnit.SECONDS) > SYNC_TTL_SECS) { return true; } else return false; } }
true
true
public <T> Result handleSyncTableDownload(Context c, String host, String dbName, TableInterface<T> table, SyncInterface<T> syncObject, int limitPerPage, String lastSync) { Log.i(this.getClass().getName(), "Performing sync network operation for:"+table.getTableName()); //create object pool to pass around Pool<T> pool = new Pool<T>(syncObject, limitPerPage); Result r = new Result(); //make net call String url = syncObject.getDownloadURL(host); //For the first call we don't know what the toDate is because we get that from the server List<NameValuePair> params = syncObject.getDownloadParams(0, limitPerPage, lastSync, ""); NetResult netResult; if (syncObject.requireAuthOnDownload()) { netResult = AuthNetHandler.handleAuthPostWithRetry(url, params , NetHandler.NETWORK_DEFAULT_RETRY_ATTEMPTS); } else { netResult = NetHandler.handlePostWithRetry(url, params , NetHandler.NETWORK_DEFAULT_RETRY_ATTEMPTS); } NetHandler.handleGenericJsonResponseHelper(netResult, this.getClass().getName()); //get json array r.success = netResult.net_success; SyncTransactionInfo syncInfo = new SyncTransactionInfo(); if (netResult.net_success) { try { syncInfo.loadFirstTransactionDetails(netResult); Result handleResult = handleParsedObjectsHelper(syncInfo, c, dbName, table, syncObject, pool); r.add_results_if_error(handleResult, handleResult.error); } catch (JSONExceptionWrapper e) { r.error += "Could not parse JSON "+syncObject.getClass().getName()+" info:"+e.getMessage(); r.success = false; Log.e(this.getClass().getName(), e.getMessage()); } //close first network connection netResult.closeNetResult(); //process until all items are collected for (int cur_page = 1; cur_page < syncInfo.numPages; cur_page++) { if(netResult.net_success) { //now we know what the toDate should be so send that along params = syncObject.getDownloadParams(cur_page, syncInfo.itemLimitPerPage, lastSync, syncInfo.toDatetime); if (syncObject.requireAuthOnDownload()) { netResult = AuthNetHandler.handleAuthPostWithRetry(url, 
params , NetHandler.NETWORK_DEFAULT_RETRY_ATTEMPTS); } else { netResult = NetHandler.handlePostWithRetry(url, params , NetHandler.NETWORK_DEFAULT_RETRY_ATTEMPTS); } NetHandler.handleGenericJsonResponseHelper(netResult, this.getClass().getName()); if (netResult.net_success) { try { syncInfo.loadTransactionDetails(netResult); Result handleResult = handleParsedObjectsHelper(syncInfo, c, dbName, table, syncObject, pool); r.add_results_if_error(handleResult, handleResult.error); } catch (JSONExceptionWrapper e) { r.error += "Could not parse JSON "+syncObject.getClass().getName()+" info:"+e.getMessage(); r.success = false; Log.e(this.getClass().getName(), e.getMessage()); } } else { //we got a network error, because r.success was true, but something went wrong r.success = false; r.add_error(netResult.getLogEntry(), false); } } else { //we got a network error, because r.success was true, but something went wrong r.success = false; r.add_error(netResult.getLogEntry(), false); } } //update synchronized date if no errors occurred if (r.success) { SyncEntry se= TableManager.getTableSyncInfo(c, dbName, table.getTableName()); se.setDownloadDate(syncInfo.toDatetime); r.add_results_if_error(TableManager.setTableSyncInfo(c, dbName, se), "Could not update table ["+table.getTableName()+"] with latest sync date of: "+syncInfo.toDatetime); } } else { r.technical_error = netResult.getLogEntry(); r.success = false; } Log.i(this.getClass().getName(), "Finished sync network operation, result:"+r.toString()); return r; }
public <T> Result handleSyncTableDownload(Context c, String host, String dbName, TableInterface<T> table, SyncInterface<T> syncObject, int limitPerPage, String lastSync) { Log.i(this.getClass().getName(), "Performing sync network operation for:"+table.getTableName()); //create object pool to pass around Pool<T> pool = new Pool<T>(syncObject, limitPerPage); Result r = new Result(); //make net call String url = syncObject.getDownloadURL(host); //For the first call we don't know what the toDate is because we get that from the server List<NameValuePair> params = syncObject.getDownloadParams(0, limitPerPage, lastSync, ""); NetResult netResult; if (syncObject.requireAuthOnDownload()) { netResult = AuthNetHandler.handleAuthPostWithRetry(url, params , NetHandler.NETWORK_DEFAULT_RETRY_ATTEMPTS); } else { netResult = NetHandler.handlePostWithRetry(url, params , NetHandler.NETWORK_DEFAULT_RETRY_ATTEMPTS); } NetHandler.handleGenericJsonResponseHelper(netResult, this.getClass().getName()); //get json array r.success = netResult.net_success; SyncTransactionInfo syncInfo = new SyncTransactionInfo(); if (netResult.net_success) { try { syncInfo.loadFirstTransactionDetails(netResult); Result handleResult = handleParsedObjectsHelper(syncInfo, c, dbName, table, syncObject, pool); r.add_results_if_error(handleResult, handleResult.error); } catch (JSONExceptionWrapper e) { r.error += "Could not parse JSON "+syncObject.getClass().getName()+" info:"+e.getMessage(); r.success = false; Log.e(this.getClass().getName(), e.getMessage()); } //close first network connection netResult.closeNetResult(); //process until all items are collected for (int cur_page = 1; cur_page < syncInfo.numPages; cur_page++) { if(netResult.net_success) { //now we know what the toDate should be so send that along params = syncObject.getDownloadParams(cur_page, syncInfo.itemLimitPerPage, lastSync, syncInfo.toDatetime); if (syncObject.requireAuthOnDownload()) { netResult = AuthNetHandler.handleAuthPostWithRetry(url, 
params , NetHandler.NETWORK_DEFAULT_RETRY_ATTEMPTS); } else { netResult = NetHandler.handlePostWithRetry(url, params , NetHandler.NETWORK_DEFAULT_RETRY_ATTEMPTS); } NetHandler.handleGenericJsonResponseHelper(netResult, this.getClass().getName()); if (netResult.net_success) { try { syncInfo.loadTransactionDetails(netResult); Result handleResult = handleParsedObjectsHelper(syncInfo, c, dbName, table, syncObject, pool); r.add_results_if_error(handleResult, handleResult.error); } catch (JSONExceptionWrapper e) { r.error += "Could not parse JSON "+syncObject.getClass().getName()+" info:"+e.getMessage(); r.success = false; Log.e(this.getClass().getName(), e.getMessage()); } } else { //we got a network error, because r.success was true, but something went wrong r.success = false; r.add_error(netResult.getLogEntry(), false); } } else { //we got a network error, because r.success was true, but something went wrong r.success = false; r.add_error(netResult.getLogEntry(), false); } } //update synchronized date if no errors occurred if (r.success) { SyncEntry se= TableManager.getTableSyncInfo(c, dbName, table.getTableName()); se.setDownloadDate(syncInfo.toDatetime); r.add_results_if_error(TableManager.setTableSyncInfo(c, dbName, se), "Could not update table ["+table.getTableName()+"] with latest sync date of: "+syncInfo.toDatetime); } } else { r.technical_error = netResult.getLogEntry(); r.error = "Could not reach page for ["+table.getTableName()+"] because of a network error."; r.success = false; } Log.i(this.getClass().getName(), "Finished sync network operation, result:"+r.toString()); return r; }
diff --git a/onebusaway-sms-webapp/src/main/java/org/onebusaway/sms/actions/StopByNumberAction.java b/onebusaway-sms-webapp/src/main/java/org/onebusaway/sms/actions/StopByNumberAction.java index 03499e3d..11a7a351 100644 --- a/onebusaway-sms-webapp/src/main/java/org/onebusaway/sms/actions/StopByNumberAction.java +++ b/onebusaway-sms-webapp/src/main/java/org/onebusaway/sms/actions/StopByNumberAction.java @@ -1,112 +1,112 @@ package org.onebusaway.sms.actions; import java.util.List; import org.onebusaway.exceptions.ServiceException; import org.onebusaway.geospatial.model.CoordinateBounds; import org.onebusaway.presentation.services.ServiceAreaService; import org.onebusaway.transit_data.model.SearchQueryBean; import org.onebusaway.transit_data.model.StopBean; import org.onebusaway.transit_data.model.StopsBean; import org.onebusaway.transit_data.model.SearchQueryBean.EQueryType; import org.springframework.beans.factory.annotation.Autowired; public class StopByNumberAction extends AbstractTextmarksAction { private static final long serialVersionUID = 1L; private ServiceAreaService _serviceAreaService; private String _stopQuery; private List<StopBean> _stops; private int _selectedIndex = -1; private String _stopId; private String[] _args; @Autowired public void setServiceAreaService(ServiceAreaService serviceAreaService) { _serviceAreaService = serviceAreaService; } public String getStopQuery() { return _stopQuery; } public List<StopBean> getStops() { return _stops; } public void setSelectedIndex(int selectedIndex) { _selectedIndex = selectedIndex; } public String getStopId() { return _stopId; } public String[] getArgs() { return _args; } @Override public String execute() throws ServiceException { if (_text != null) _text.trim(); if (_text == null || _text.length() == 0) return INPUT; String[] tokens = _text.trim().split("\\s+"); if (tokens.length == 0) return INPUT; CoordinateBounds serviceArea = _serviceAreaService.getServiceArea(); if (serviceArea == null) { 
pushNextAction("stop-by-number", "text", _text); return "query-default-search-location"; } _stopQuery = tokens[0]; SearchQueryBean searchQuery = new SearchQueryBean(); searchQuery.setBounds(serviceArea); searchQuery.setMaxCount(5); searchQuery.setType(EQueryType.BOUNDS_OR_CLOSEST); searchQuery.setQuery(_stopQuery); StopsBean results = _transitDataService.getStops(searchQuery); _stops = results.getStops(); logUserInteraction("stopQuery", _stopQuery); int stopIndex = 0; if (_stops.isEmpty()) { return "noStopsFound"; } else if (_stops.size() > 1) { if (0 <= _selectedIndex && _selectedIndex < _stops.size()) { stopIndex = _selectedIndex; } else { - pushNextAction("arrivals-and-departures", "text", _text); + pushNextAction("stop-by-number", "text", _text); pushNextAction("handle-multi-selection"); return "multipleStopsFound"; } } StopBean stop = _stops.get(stopIndex); _stopId = stop.getId(); _args = new String[tokens.length - 1]; System.arraycopy(tokens, 1, _args, 0, _args.length); return "arrivals-and-departures"; } }
true
true
public String execute() throws ServiceException { if (_text != null) _text.trim(); if (_text == null || _text.length() == 0) return INPUT; String[] tokens = _text.trim().split("\\s+"); if (tokens.length == 0) return INPUT; CoordinateBounds serviceArea = _serviceAreaService.getServiceArea(); if (serviceArea == null) { pushNextAction("stop-by-number", "text", _text); return "query-default-search-location"; } _stopQuery = tokens[0]; SearchQueryBean searchQuery = new SearchQueryBean(); searchQuery.setBounds(serviceArea); searchQuery.setMaxCount(5); searchQuery.setType(EQueryType.BOUNDS_OR_CLOSEST); searchQuery.setQuery(_stopQuery); StopsBean results = _transitDataService.getStops(searchQuery); _stops = results.getStops(); logUserInteraction("stopQuery", _stopQuery); int stopIndex = 0; if (_stops.isEmpty()) { return "noStopsFound"; } else if (_stops.size() > 1) { if (0 <= _selectedIndex && _selectedIndex < _stops.size()) { stopIndex = _selectedIndex; } else { pushNextAction("arrivals-and-departures", "text", _text); pushNextAction("handle-multi-selection"); return "multipleStopsFound"; } } StopBean stop = _stops.get(stopIndex); _stopId = stop.getId(); _args = new String[tokens.length - 1]; System.arraycopy(tokens, 1, _args, 0, _args.length); return "arrivals-and-departures"; }
public String execute() throws ServiceException { if (_text != null) _text.trim(); if (_text == null || _text.length() == 0) return INPUT; String[] tokens = _text.trim().split("\\s+"); if (tokens.length == 0) return INPUT; CoordinateBounds serviceArea = _serviceAreaService.getServiceArea(); if (serviceArea == null) { pushNextAction("stop-by-number", "text", _text); return "query-default-search-location"; } _stopQuery = tokens[0]; SearchQueryBean searchQuery = new SearchQueryBean(); searchQuery.setBounds(serviceArea); searchQuery.setMaxCount(5); searchQuery.setType(EQueryType.BOUNDS_OR_CLOSEST); searchQuery.setQuery(_stopQuery); StopsBean results = _transitDataService.getStops(searchQuery); _stops = results.getStops(); logUserInteraction("stopQuery", _stopQuery); int stopIndex = 0; if (_stops.isEmpty()) { return "noStopsFound"; } else if (_stops.size() > 1) { if (0 <= _selectedIndex && _selectedIndex < _stops.size()) { stopIndex = _selectedIndex; } else { pushNextAction("stop-by-number", "text", _text); pushNextAction("handle-multi-selection"); return "multipleStopsFound"; } } StopBean stop = _stops.get(stopIndex); _stopId = stop.getId(); _args = new String[tokens.length - 1]; System.arraycopy(tokens, 1, _args, 0, _args.length); return "arrivals-and-departures"; }
diff --git a/src/org/apache/fop/image/JAIImage.java b/src/org/apache/fop/image/JAIImage.java index e5e15bc62..54c288e2f 100644 --- a/src/org/apache/fop/image/JAIImage.java +++ b/src/org/apache/fop/image/JAIImage.java @@ -1,150 +1,150 @@ /* * $Id$ * Copyright (C) 2001 The Apache Software Foundation. All rights reserved. * For details on use and redistribution please refer to the * LICENSE file included with these sources. */ package org.apache.fop.image; // Java import java.net.URL; import java.io.InputStream; import java.io.BufferedInputStream; // AWT import java.awt.image.ColorModel; import java.awt.image.IndexColorModel; import java.awt.image.BufferedImage; // JAI import javax.media.jai.JAI; import javax.media.jai.RenderedOp; // Sun codec import com.sun.media.jai.codec.FileCacheSeekableStream; // FOP import org.apache.fop.datatypes.ColorSpace; import org.apache.fop.pdf.PDFColor; import org.apache.fop.image.analyser.ImageReader; /** * FopImage object using JAI. * @author Eric SCHAEFFER * @see AbstractFopImage * @see FopImage */ public class JAIImage extends AbstractFopImage { public JAIImage(URL href) { super(href); } public JAIImage(URL href, FopImage.ImageInfo imgReader) { super(href, imgReader); } protected void loadImage() { try { InputStream inputStream = this.m_href.openStream(); /* * BufferedInputStream inputStream = this.m_imageReader.getInputStream(); * inputStream.reset(); */ com.sun.media.jai.codec.FileCacheSeekableStream seekableInput = new FileCacheSeekableStream(inputStream); RenderedOp imageOp = JAI.create("stream", seekableInput); this.m_height = imageOp.getHeight(); this.m_width = imageOp.getWidth(); ColorModel cm = imageOp.getColorModel(); this.m_bitsPerPixel = 8; // this.m_bitsPerPixel = cm.getPixelSize(); this.m_colorSpace = new ColorSpace(ColorSpace.DEVICE_RGB); BufferedImage imageData = imageOp.getAsBufferedImage(); int[] tmpMap = imageData.getRGB(0, 0, this.m_width, this.m_height, null, 0, this.m_width); if (cm.hasAlpha()) { int 
transparencyType = cm.getTransparency(); // java.awt.Transparency. BITMASK or OPAQUE or TRANSLUCENT if (transparencyType == java.awt.Transparency.OPAQUE) { this.m_isTransparent = false; } else if (transparencyType == java.awt.Transparency.BITMASK) { if (cm instanceof IndexColorModel) { this.m_isTransparent = false; byte[] alphas = new byte[ ((IndexColorModel) cm).getMapSize()]; byte[] reds = new byte[ ((IndexColorModel) cm).getMapSize()]; byte[] greens = new byte[ ((IndexColorModel) cm).getMapSize()]; byte[] blues = new byte[ ((IndexColorModel) cm).getMapSize()]; ((IndexColorModel) cm).getAlphas(alphas); ((IndexColorModel) cm).getReds(reds); ((IndexColorModel) cm).getGreens(greens); ((IndexColorModel) cm).getBlues(blues); for (int i = 0; i < ((IndexColorModel) cm).getMapSize(); i++) { if ((alphas[i] & 0xFF) == 0) { this.m_isTransparent = true; this.m_transparentColor = new PDFColor( (int)(reds[i] & 0xFF), (int)(greens[i] & 0xFF), (int)(blues[i] & 0xFF)); break; } } } else { // TRANSLUCENT /* * this.m_isTransparent = false; * for (int i = 0; i < this.m_width * this.m_height; i++) { * if (cm.getAlpha(tmpMap[i]) == 0) { * this.m_isTransparent = true; * this.m_transparentColor = new PDFColor(cm.getRed(tmpMap[i]), cm.getGreen(tmpMap[i]), cm.getBlue(tmpMap[i])); * break; * } * } * // or use special API... 
*/ this.m_isTransparent = false; } } else { this.m_isTransparent = false; } } else { this.m_isTransparent = false; } // Should take care of the ColorSpace and bitsPerPixel this.m_bitmapsSize = this.m_width * this.m_height * 3; this.m_bitmaps = new byte[this.m_bitmapsSize]; for (int i = 0; i < this.m_height; i++) { for (int j = 0; j < this.m_width; j++) { int p = tmpMap[i * this.m_width + j]; - int r = (p > > 16) & 0xFF; - int g = (p > > 8) & 0xFF; + int r = (p >> 16) & 0xFF; + int g = (p >> 8) & 0xFF; int b = (p) & 0xFF; this.m_bitmaps[3 * (i * this.m_width + j)] = (byte)(r & 0xFF); this.m_bitmaps[3 * (i * this.m_width + j) + 1] = (byte)(g & 0xFF); this.m_bitmaps[3 * (i * this.m_width + j) + 2] = (byte)(b & 0xFF); } } } catch (Exception ex) { /*throw new FopImageException("Error while loading image " + this.m_href.toString() + " : " + ex.getClass() + " - " + ex.getMessage()); */} } }
true
true
protected void loadImage() { try { InputStream inputStream = this.m_href.openStream(); /* * BufferedInputStream inputStream = this.m_imageReader.getInputStream(); * inputStream.reset(); */ com.sun.media.jai.codec.FileCacheSeekableStream seekableInput = new FileCacheSeekableStream(inputStream); RenderedOp imageOp = JAI.create("stream", seekableInput); this.m_height = imageOp.getHeight(); this.m_width = imageOp.getWidth(); ColorModel cm = imageOp.getColorModel(); this.m_bitsPerPixel = 8; // this.m_bitsPerPixel = cm.getPixelSize(); this.m_colorSpace = new ColorSpace(ColorSpace.DEVICE_RGB); BufferedImage imageData = imageOp.getAsBufferedImage(); int[] tmpMap = imageData.getRGB(0, 0, this.m_width, this.m_height, null, 0, this.m_width); if (cm.hasAlpha()) { int transparencyType = cm.getTransparency(); // java.awt.Transparency. BITMASK or OPAQUE or TRANSLUCENT if (transparencyType == java.awt.Transparency.OPAQUE) { this.m_isTransparent = false; } else if (transparencyType == java.awt.Transparency.BITMASK) { if (cm instanceof IndexColorModel) { this.m_isTransparent = false; byte[] alphas = new byte[ ((IndexColorModel) cm).getMapSize()]; byte[] reds = new byte[ ((IndexColorModel) cm).getMapSize()]; byte[] greens = new byte[ ((IndexColorModel) cm).getMapSize()]; byte[] blues = new byte[ ((IndexColorModel) cm).getMapSize()]; ((IndexColorModel) cm).getAlphas(alphas); ((IndexColorModel) cm).getReds(reds); ((IndexColorModel) cm).getGreens(greens); ((IndexColorModel) cm).getBlues(blues); for (int i = 0; i < ((IndexColorModel) cm).getMapSize(); i++) { if ((alphas[i] & 0xFF) == 0) { this.m_isTransparent = true; this.m_transparentColor = new PDFColor( (int)(reds[i] & 0xFF), (int)(greens[i] & 0xFF), (int)(blues[i] & 0xFF)); break; } } } else { // TRANSLUCENT /* * this.m_isTransparent = false; * for (int i = 0; i < this.m_width * this.m_height; i++) { * if (cm.getAlpha(tmpMap[i]) == 0) { * this.m_isTransparent = true; * this.m_transparentColor = new PDFColor(cm.getRed(tmpMap[i]), 
cm.getGreen(tmpMap[i]), cm.getBlue(tmpMap[i])); * break; * } * } * // or use special API... */ this.m_isTransparent = false; } } else { this.m_isTransparent = false; } } else { this.m_isTransparent = false; } // Should take care of the ColorSpace and bitsPerPixel this.m_bitmapsSize = this.m_width * this.m_height * 3; this.m_bitmaps = new byte[this.m_bitmapsSize]; for (int i = 0; i < this.m_height; i++) { for (int j = 0; j < this.m_width; j++) { int p = tmpMap[i * this.m_width + j]; int r = (p > > 16) & 0xFF; int g = (p > > 8) & 0xFF; int b = (p) & 0xFF; this.m_bitmaps[3 * (i * this.m_width + j)] = (byte)(r & 0xFF); this.m_bitmaps[3 * (i * this.m_width + j) + 1] = (byte)(g & 0xFF); this.m_bitmaps[3 * (i * this.m_width + j) + 2] = (byte)(b & 0xFF); } } } catch (Exception ex) { /*throw new FopImageException("Error while loading image " + this.m_href.toString() + " : " + ex.getClass() + " - " + ex.getMessage()); */} } }
protected void loadImage() { try { InputStream inputStream = this.m_href.openStream(); /* * BufferedInputStream inputStream = this.m_imageReader.getInputStream(); * inputStream.reset(); */ com.sun.media.jai.codec.FileCacheSeekableStream seekableInput = new FileCacheSeekableStream(inputStream); RenderedOp imageOp = JAI.create("stream", seekableInput); this.m_height = imageOp.getHeight(); this.m_width = imageOp.getWidth(); ColorModel cm = imageOp.getColorModel(); this.m_bitsPerPixel = 8; // this.m_bitsPerPixel = cm.getPixelSize(); this.m_colorSpace = new ColorSpace(ColorSpace.DEVICE_RGB); BufferedImage imageData = imageOp.getAsBufferedImage(); int[] tmpMap = imageData.getRGB(0, 0, this.m_width, this.m_height, null, 0, this.m_width); if (cm.hasAlpha()) { int transparencyType = cm.getTransparency(); // java.awt.Transparency. BITMASK or OPAQUE or TRANSLUCENT if (transparencyType == java.awt.Transparency.OPAQUE) { this.m_isTransparent = false; } else if (transparencyType == java.awt.Transparency.BITMASK) { if (cm instanceof IndexColorModel) { this.m_isTransparent = false; byte[] alphas = new byte[ ((IndexColorModel) cm).getMapSize()]; byte[] reds = new byte[ ((IndexColorModel) cm).getMapSize()]; byte[] greens = new byte[ ((IndexColorModel) cm).getMapSize()]; byte[] blues = new byte[ ((IndexColorModel) cm).getMapSize()]; ((IndexColorModel) cm).getAlphas(alphas); ((IndexColorModel) cm).getReds(reds); ((IndexColorModel) cm).getGreens(greens); ((IndexColorModel) cm).getBlues(blues); for (int i = 0; i < ((IndexColorModel) cm).getMapSize(); i++) { if ((alphas[i] & 0xFF) == 0) { this.m_isTransparent = true; this.m_transparentColor = new PDFColor( (int)(reds[i] & 0xFF), (int)(greens[i] & 0xFF), (int)(blues[i] & 0xFF)); break; } } } else { // TRANSLUCENT /* * this.m_isTransparent = false; * for (int i = 0; i < this.m_width * this.m_height; i++) { * if (cm.getAlpha(tmpMap[i]) == 0) { * this.m_isTransparent = true; * this.m_transparentColor = new PDFColor(cm.getRed(tmpMap[i]), 
cm.getGreen(tmpMap[i]), cm.getBlue(tmpMap[i])); * break; * } * } * // or use special API... */ this.m_isTransparent = false; } } else { this.m_isTransparent = false; } } else { this.m_isTransparent = false; } // Should take care of the ColorSpace and bitsPerPixel this.m_bitmapsSize = this.m_width * this.m_height * 3; this.m_bitmaps = new byte[this.m_bitmapsSize]; for (int i = 0; i < this.m_height; i++) { for (int j = 0; j < this.m_width; j++) { int p = tmpMap[i * this.m_width + j]; int r = (p >> 16) & 0xFF; int g = (p >> 8) & 0xFF; int b = (p) & 0xFF; this.m_bitmaps[3 * (i * this.m_width + j)] = (byte)(r & 0xFF); this.m_bitmaps[3 * (i * this.m_width + j) + 1] = (byte)(g & 0xFF); this.m_bitmaps[3 * (i * this.m_width + j) + 2] = (byte)(b & 0xFF); } } } catch (Exception ex) { /*throw new FopImageException("Error while loading image " + this.m_href.toString() + " : " + ex.getClass() + " - " + ex.getMessage()); */} } }